etape laptop

This commit is contained in:
2026-02-09 00:01:29 +01:00
commit 805fef0cdc
144 changed files with 15295 additions and 0 deletions

0
backend/app/__init__.py Normal file
View File

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

9
backend/app/config.py Normal file
View File

@@ -0,0 +1,9 @@
import os
from pathlib import Path

# Data directory: taken from $DATA_DIR when set, otherwise "<backend>/data".
_backend_dir = os.path.dirname(os.path.dirname(__file__))
DATA_DIR = Path(os.getenv("DATA_DIR", os.path.join(_backend_dir, "data")))
DATA_DIR.mkdir(parents=True, exist_ok=True)  # ensure it exists at import time

# SQLite file inside DATA_DIR unless $DATABASE_URL points elsewhere.
DATABASE_URL = os.getenv("DATABASE_URL", f"sqlite:///{DATA_DIR / 'webcarto.db'}")

# Upload size cap in bytes (default 50 MiB), overridable via $MAX_UPLOAD_SIZE.
MAX_UPLOAD_SIZE = int(os.getenv("MAX_UPLOAD_SIZE", 50 * 1024 * 1024))

13
backend/app/database.py Normal file
View File

@@ -0,0 +1,13 @@
from sqlmodel import SQLModel, create_engine, Session
from .config import DATABASE_URL
# One process-wide engine; echo=False keeps SQL statement logging off.
engine = create_engine(DATABASE_URL, echo=False)
def init_db():
    """Create every table registered on SQLModel.metadata (no-op if present)."""
    SQLModel.metadata.create_all(engine)
def get_session():
    """Yield a Session bound to the shared engine (used as a FastAPI dependency)."""
    with Session(engine) as session:
        yield session

33
backend/app/main.py Normal file
View File

@@ -0,0 +1,33 @@
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from starlette.formparsers import MultiPartParser
from .database import init_db
from .config import MAX_UPLOAD_SIZE
from .routes import datasets, features, images, settings
# Raise the multipart part-size limit (Starlette default: 1 MB) so uploads
# up to the configured maximum are accepted.
MultiPartParser.max_part_size = MAX_UPLOAD_SIZE
app = FastAPI(title="WebCarto API", version="0.1.0")
# CORS wide open (any origin/method/header).
# NOTE(review): tighten allow_origins before exposing this API publicly.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
)
# Every router is mounted under the /api prefix.
app.include_router(datasets.router, prefix="/api")
app.include_router(features.router, prefix="/api")
app.include_router(images.router, prefix="/api")
app.include_router(settings.router, prefix="/api")
@app.on_event("startup")  # NOTE(review): on_event is deprecated in newer FastAPI; consider lifespan
def on_startup():
    """Create the database tables when the application starts."""
    init_db()
@app.get("/api/health")
def health():
    """Liveness probe."""
    return {"status": "ok"}

42
backend/app/models.py Normal file
View File

@@ -0,0 +1,42 @@
from datetime import datetime, timezone
from typing import Optional
from sqlmodel import SQLModel, Field, Column
import sqlalchemy as sa
class Dataset(SQLModel, table=True):
    """An imported geodata file plus summary info about its features."""
    id: Optional[int] = Field(default=None, primary_key=True)
    # Display name (derived from the uploaded file's stem at import time)
    name: str
    # Name of the original file kept under DATA_DIR/raw
    raw_filename: str
    feature_count: int = 0
    bbox_json: Optional[str] = None  # JSON string [minLng, minLat, maxLng, maxLat]
    # NOTE(review): both a Python-side default_factory and a SQL-side default
    # (func.now()) are declared here — confirm which one is meant to win.
    created_at: datetime = Field(
        default_factory=lambda: datetime.now(timezone.utc),
        sa_column=Column(sa.DateTime(timezone=True), default=sa.func.now()),
    )
class Feature(SQLModel, table=True):
    """One GeoJSON feature; geometry and properties are stored as JSON text."""
    id: Optional[int] = Field(default=None, primary_key=True)
    dataset_id: int = Field(foreign_key="dataset.id", index=True)
    geometry_json: str  # GeoJSON geometry as JSON string
    properties_json: str  # GeoJSON properties as JSON string
class FeatureVersion(SQLModel, table=True):
    """Audit entry recording the before/after snapshots of a feature edit."""
    id: Optional[int] = Field(default=None, primary_key=True)
    feature_id: int = Field(foreign_key="feature.id", index=True)
    before_json: str  # {"geometry": ..., "properties": ...} before the edit
    after_json: str  # same shape, after the edit
    # NOTE(review): default_factory and SQL-side func.now() both declared —
    # same remark as Dataset.created_at.
    timestamp: datetime = Field(
        default_factory=lambda: datetime.now(timezone.utc),
        sa_column=Column(sa.DateTime(timezone=True), default=sa.func.now()),
    )
class MapSettings(SQLModel, table=True):
    """Singleton row (fetched with id=1 by the settings routes) holding the saved map view."""
    id: Optional[int] = Field(default=None, primary_key=True)
    center_lng: float = 2.35  # default view roughly centered on France
    center_lat: float = 48.85
    zoom: float = 5.0
    base_layer: str = "vector"

View File

Binary file not shown.

View File

@@ -0,0 +1,296 @@
import json
import shutil
import xml.etree.ElementTree as ET
import re
import base64
import logging
from pathlib import Path
from fastapi import APIRouter, Depends, UploadFile, File, Form, HTTPException
from sqlmodel import Session, select
from ..database import get_session
from ..models import Dataset, Feature, FeatureVersion
from ..config import DATA_DIR, MAX_UPLOAD_SIZE
from .images import extract_and_save_images, IMAGES_DIR
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/datasets", tags=["datasets"])
@router.get("")
def list_datasets(session: Session = Depends(get_session)):
    """Return the summary (id, name, count, creation date, bbox) of every dataset."""
    return [
        {
            "id": ds.id,
            "name": ds.name,
            "feature_count": ds.feature_count,
            "created_at": ds.created_at.isoformat(),
            "bbox": json.loads(ds.bbox_json) if ds.bbox_json else None,
        }
        for ds in session.exec(select(Dataset)).all()
    ]
@router.get("/{dataset_id}")
def get_dataset(dataset_id: int, session: Session = Depends(get_session)):
    """Return one dataset with all of its features fully decoded."""
    ds = session.get(Dataset, dataset_id)
    if not ds:
        raise HTTPException(404, "Dataset non trouvé")
    features = session.exec(
        select(Feature).where(Feature.dataset_id == dataset_id)
    ).all()
    bbox = json.loads(ds.bbox_json) if ds.bbox_json else None
    return {
        "id": ds.id,
        "name": ds.name,
        "feature_count": ds.feature_count,
        "created_at": ds.created_at.isoformat(),
        "bbox": bbox,
        "raw_filename": ds.raw_filename,
        # Geometry/properties are stored as JSON strings; decode for the client
        "features": [
            {
                "id": f.id,
                "geometry": json.loads(f.geometry_json),
                "properties": json.loads(f.properties_json),
            }
            for f in features
        ],
    }
@router.delete("/{dataset_id}")
def delete_dataset(dataset_id: int, session: Session = Depends(get_session)):
    """Delete a dataset plus its features, their versions, images, and raw file."""
    ds = session.get(Dataset, dataset_id)
    if not ds:
        raise HTTPException(404, "Dataset non trouvé")
    # Delete the version rows of every feature, then the features themselves
    features = session.exec(
        select(Feature).where(Feature.dataset_id == dataset_id)
    ).all()
    for f in features:
        versions = session.exec(
            select(FeatureVersion).where(FeatureVersion.feature_id == f.id)
        ).all()
        for v in versions:
            session.delete(v)
        session.delete(f)
    # Remove the dataset's image directory
    img_dir = IMAGES_DIR / str(dataset_id)
    if img_dir.exists():
        shutil.rmtree(img_dir)
    # Remove the original uploaded file
    raw_path = DATA_DIR / "raw" / ds.raw_filename
    if raw_path.exists():
        raw_path.unlink()
    session.delete(ds)
    session.commit()
    return {"ok": True}
@router.post("/import")
async def import_dataset(
    file: UploadFile = File(...),
    geojson: str = Form(...),
    session: Session = Depends(get_session),
):
    """Import a FeatureCollection together with its original upload.

    *file* is kept verbatim under DATA_DIR/raw; *geojson* is the already
    converted FeatureCollection as a JSON string. Returns the dataset summary.
    """
    # Validate the GeoJSON payload before touching the filesystem so an
    # invalid request leaves no orphan raw file behind.
    try:
        fc = json.loads(geojson)
    except json.JSONDecodeError:
        raise HTTPException(400, "GeoJSON invalide")
    if fc.get("type") != "FeatureCollection":
        raise HTTPException(400, "Le JSON doit être un FeatureCollection")
    features_data = fc.get("features", [])
    # Save the raw file. Use only the basename of the client-supplied name:
    # a name containing "../" must not escape DATA_DIR/raw (path traversal).
    raw_dir = DATA_DIR / "raw"
    raw_dir.mkdir(exist_ok=True)
    content = await file.read()
    if len(content) > MAX_UPLOAD_SIZE:
        raise HTTPException(413, "Fichier trop volumineux")
    safe_name = Path(file.filename or "upload.bin").name
    raw_path = raw_dir / safe_name
    # Avoid overwriting an existing raw file
    counter = 1
    while raw_path.exists():
        stem = Path(safe_name).stem
        suffix = Path(safe_name).suffix
        raw_path = raw_dir / f"{stem}_{counter}{suffix}"
        counter += 1
    raw_path.write_bytes(content)
    # Compute the bbox and create the dataset row
    bbox = _compute_bbox(features_data)
    ds = Dataset(
        name=Path(safe_name).stem,
        raw_filename=raw_path.name,
        feature_count=len(features_data),
        bbox_json=json.dumps(bbox) if bbox else None,
    )
    session.add(ds)
    session.commit()
    session.refresh(ds)
    # Create one Feature row per GeoJSON feature
    for i, f_data in enumerate(features_data):
        geometry = f_data.get("geometry", {})
        properties = f_data.get("properties", {})
        # Inline base64 images (sent in the JSON) are moved to disk
        properties = extract_and_save_images(properties, ds.id, i)
        feature = Feature(
            dataset_id=ds.id,
            geometry_json=json.dumps(geometry),
            properties_json=json.dumps(properties),
        )
        session.add(feature)
    session.commit()
    # For KML uploads, also pull base64 images out of the raw file
    if file.filename and file.filename.lower().endswith(".kml"):
        _extract_kml_images(raw_path, ds.id, session)
    bbox_out = json.loads(ds.bbox_json) if ds.bbox_json else None
    return {
        "id": ds.id,
        "name": ds.name,
        "feature_count": ds.feature_count,
        "created_at": ds.created_at.isoformat(),
        "bbox": bbox_out,
    }
@router.post("/{dataset_id}/export")
def export_dataset(dataset_id: int, format: str = "geojson", session: Session = Depends(get_session)):
    """Export a dataset as a downloadable GeoJSON FeatureCollection.

    NOTE(review): `format` is currently unused — only GeoJSON is produced;
    presumably reserved for future formats. Confirm before removing it.
    """
    ds = session.get(Dataset, dataset_id)
    if not ds:
        raise HTTPException(404, "Dataset non trouvé")
    features = session.exec(
        select(Feature).where(Feature.dataset_id == dataset_id)
    ).all()
    # Rebuild the FeatureCollection from the stored JSON columns
    fc = {
        "type": "FeatureCollection",
        "features": [
            {
                "type": "Feature",
                "geometry": json.loads(f.geometry_json),
                "properties": json.loads(f.properties_json),
            }
            for f in features
        ],
    }
    from fastapi.responses import Response
    return Response(
        content=json.dumps(fc, ensure_ascii=False, indent=2),
        media_type="application/geo+json",
        # NOTE(review): ds.name is inserted unescaped; a name containing '"'
        # would break this header — confirm names are sanitized upstream.
        headers={"Content-Disposition": f'attachment; filename="{ds.name}.geojson"'},
    )
def _compute_bbox(features: list) -> list | None:
    """Return [minLng, minLat, maxLng, maxLat] over all feature geometries,
    or None when no coordinates are present."""
    points: list = []
    for feat in features:
        _extract_coords(feat.get("geometry", {}), points)
    if not points:
        return None
    xs = [pt[0] for pt in points]
    ys = [pt[1] for pt in points]
    return [min(xs), min(ys), max(xs), max(ys)]
def _extract_coords(geometry: dict, coords: list):
gtype = geometry.get("type", "")
coordinates = geometry.get("coordinates")
if not coordinates:
return
if gtype == "Point":
coords.append(coordinates)
elif gtype in ("MultiPoint", "LineString"):
coords.extend(coordinates)
elif gtype in ("MultiLineString", "Polygon"):
for ring in coordinates:
coords.extend(ring)
elif gtype == "MultiPolygon":
for polygon in coordinates:
for ring in polygon:
coords.extend(ring)
elif gtype == "GeometryCollection":
for g in geometry.get("geometries", []):
_extract_coords(g, coords)
def _extract_kml_images(kml_path: Path, dataset_id: int, session: Session):
    """Extract base64 images from gx:imageUrl elements of the raw KML file
    and attach them to the matching features, paired by Placemark index."""
    try:
        tree = ET.parse(kml_path)
    except ET.ParseError as e:
        logger.warning(f"Impossible de parser le KML {kml_path}: {e}")
        return
    root = tree.getroot()
    ns = {
        "kml": "http://www.opengis.net/kml/2.2",
        "gx": "http://www.google.com/kml/ext/2.2",
    }
    placemarks = root.findall(".//kml:Placemark", ns)
    features = session.exec(
        select(Feature).where(Feature.dataset_id == dataset_id)
    ).all()
    # Index-based pairing only works when counts match exactly
    if len(placemarks) != len(features):
        logger.warning(
            f"KML {kml_path}: {len(placemarks)} placemarks vs {len(features)} features, "
            "extraction images par index impossible"
        )
        return
    img_dir = IMAGES_DIR / str(dataset_id)
    img_dir.mkdir(parents=True, exist_ok=True)
    # DOTALL: base64 payloads may span multiple lines
    data_uri_re = re.compile(r"data:image/(\w+);base64,(.+)", re.DOTALL)
    for i, (pm, feature) in enumerate(zip(placemarks, features)):
        image_urls = pm.findall(".//gx:imageUrl", ns)
        if not image_urls:
            continue
        saved = []
        for j, img_el in enumerate(image_urls):
            data_uri = (img_el.text or "").strip()
            match = data_uri_re.match(data_uri)
            if not match:
                continue
            ext = match.group(1)
            if ext == "jpeg":
                ext = "jpg"
            b64_data = match.group(2)
            try:
                raw = base64.b64decode(b64_data)
                filename = f"{i}_{j}.{ext}"
                (img_dir / filename).write_bytes(raw)
                # Fixed: the URL must reference the file actually written
                saved.append(f"/api/images/{dataset_id}/{filename}")
            except Exception as e:
                logger.warning(f"Erreur décodage image placemark {i} img {j}: {e}")
                continue
        if saved:
            props = json.loads(feature.properties_json)
            existing = props.get("_images", [])
            props["_images"] = existing + saved
            feature.properties_json = json.dumps(props)
            session.add(feature)
    session.commit()

View File

@@ -0,0 +1,96 @@
import json
from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel
from typing import Optional
from sqlmodel import Session, select
from ..database import get_session
from ..models import Dataset, Feature, FeatureVersion
from .images import IMAGES_DIR
router = APIRouter(prefix="/features", tags=["features"])
class FeatureUpdate(BaseModel):
    """Partial update payload for a feature; None fields are left untouched."""
    geometry: Optional[dict] = None
    properties: Optional[dict] = None
@router.put("/{feature_id}")
def update_feature(
    feature_id: int,
    data: FeatureUpdate,
    session: Session = Depends(get_session),
):
    """Update a feature's geometry and/or properties.

    Every call records a FeatureVersion with before/after snapshots.
    Returns the feature id and the total number of stored versions.
    """
    feature = session.get(Feature, feature_id)
    if not feature:
        raise HTTPException(404, "Feature non trouvée")
    before = {
        "geometry": json.loads(feature.geometry_json),
        "properties": json.loads(feature.properties_json),
    }
    # Only the fields present in the payload are overwritten
    if data.geometry is not None:
        feature.geometry_json = json.dumps(data.geometry)
    if data.properties is not None:
        feature.properties_json = json.dumps(data.properties)
    after = {
        "geometry": json.loads(feature.geometry_json),
        "properties": json.loads(feature.properties_json),
    }
    # Record the change as a new version
    version = FeatureVersion(
        feature_id=feature_id,
        before_json=json.dumps(before),
        after_json=json.dumps(after),
    )
    session.add(version)
    session.add(feature)
    session.commit()
    session.refresh(feature)
    # Count this feature's versions. `select` is already imported at module
    # level; the previous local re-import of it was redundant and shadowing.
    from sqlmodel import func
    count = session.exec(
        select(func.count()).where(FeatureVersion.feature_id == feature_id)
    ).one()
    return {"id": feature.id, "version": count}
@router.delete("/{feature_id}")
def delete_feature(
    feature_id: int,
    session: Session = Depends(get_session),
):
    """Delete a feature, its versions and its image files; fix the dataset count."""
    feature = session.get(Feature, feature_id)
    if not feature:
        raise HTTPException(404, "Feature non trouvée")
    # Delete the version history first
    versions = session.exec(
        select(FeatureVersion).where(FeatureVersion.feature_id == feature_id)
    ).all()
    for v in versions:
        session.delete(v)
    # Delete the image files referenced by this feature's properties
    props = json.loads(feature.properties_json)
    for img_url in props.get("_images", []):
        if img_url.startswith("/api/images/"):
            filename = img_url.split("/")[-1]
            filepath = IMAGES_DIR / str(feature.dataset_id) / filename
            if filepath.exists():
                filepath.unlink()
    # Keep the dataset's cached feature count in sync
    dataset = session.get(Dataset, feature.dataset_id)
    if dataset:
        dataset.feature_count = max(0, dataset.feature_count - 1)
        session.add(dataset)
    session.delete(feature)
    session.commit()
    return {"ok": True}

View File

@@ -0,0 +1,141 @@
import json
import base64
import re
import uuid
from pathlib import Path
from fastapi import APIRouter, Depends, UploadFile, File, HTTPException
from fastapi.responses import FileResponse
from sqlmodel import Session
from ..database import get_session
from ..models import Feature
from ..config import DATA_DIR
router = APIRouter(prefix="/images", tags=["images"])
# All images live under DATA_DIR/images/<dataset_id>/<filename>
IMAGES_DIR = DATA_DIR / "images"
@router.get("/{dataset_id}/{filename}")
def get_image(dataset_id: int, filename: str):
    """Serve a stored image file.

    Fixed: the route pattern must contain the `{filename}` placeholder so the
    path segment binds to the `filename` parameter (it was a literal string).
    """
    path = IMAGES_DIR / str(dataset_id) / filename
    # Security first: reject any path resolving outside IMAGES_DIR before
    # revealing whether the file exists (path-traversal defense)
    if not path.resolve().is_relative_to(IMAGES_DIR.resolve()):
        raise HTTPException(403, "Accès interdit")
    if not path.exists() or not path.is_file():
        raise HTTPException(404, "Image non trouvée")
    # Media type guessed from the extension; JPEG by default
    media_type = "image/jpeg"
    if filename.endswith(".png"):
        media_type = "image/png"
    elif filename.endswith(".webp"):
        media_type = "image/webp"
    return FileResponse(path, media_type=media_type)
@router.post("/features/{feature_id}")
async def upload_image(
    feature_id: int,
    file: UploadFile = File(...),
    session: Session = Depends(get_session),
):
    """Upload a new image for a feature and register its URL in `_images`."""
    feature = session.get(Feature, feature_id)
    if not feature:
        raise HTTPException(404, "Feature non trouvée")
    props = json.loads(feature.properties_json)
    images = props.get("_images", [])
    # Store the file under DATA_DIR/images/<dataset_id>/
    img_dir = IMAGES_DIR / str(feature.dataset_id)
    img_dir.mkdir(parents=True, exist_ok=True)
    # file.filename may be None; only its extension is used (the stored name
    # is generated server-side, so no traversal risk)
    ext = Path(file.filename or "").suffix or ".jpg"
    filename = f"{feature_id}_{uuid.uuid4().hex[:8]}{ext}"
    filepath = img_dir / filename
    content = await file.read()
    filepath.write_bytes(content)
    # Record the served URL in the feature's properties
    # (fixed: the URL previously lacked the generated filename)
    url = f"/api/images/{feature.dataset_id}/{filename}"
    images.append(url)
    props["_images"] = images
    feature.properties_json = json.dumps(props)
    session.add(feature)
    session.commit()
    return {"url": url, "images": images}
@router.delete("/features/{feature_id}/{filename}")
def delete_image(
    feature_id: int,
    filename: str,
    session: Session = Depends(get_session),
):
    """Remove an image from a feature: drop its URL and delete the file.

    Fixed: the route pattern and the rebuilt URL both need the `{filename}`
    placeholder (they contained a literal string, so no URL ever matched).
    """
    feature = session.get(Feature, feature_id)
    if not feature:
        raise HTTPException(404, "Feature non trouvée")
    props = json.loads(feature.properties_json)
    images = props.get("_images", [])
    # The stored URL must match exactly
    url = f"/api/images/{feature.dataset_id}/{filename}"
    if url not in images:
        raise HTTPException(404, "Image non trouvée dans cette feature")
    images.remove(url)
    props["_images"] = images
    feature.properties_json = json.dumps(props)
    session.add(feature)
    session.commit()
    # Delete the file itself, refusing paths that escape IMAGES_DIR
    filepath = IMAGES_DIR / str(feature.dataset_id) / filename
    if filepath.exists() and filepath.resolve().is_relative_to(IMAGES_DIR.resolve()):
        filepath.unlink()
    return {"images": images}
def extract_and_save_images(properties: dict, dataset_id: int, feature_index: int) -> dict:
    """Extract base64 images from the `_images` property and save them to disk.

    data: URIs are decoded to files under IMAGES_DIR/<dataset_id>/ and replaced
    by /api/images/... URLs; existing server URLs and external http(s) URLs are
    kept as-is. Entries that match none of these are dropped. Mutates and
    returns *properties*.
    """
    images = properties.get("_images", [])
    if not images:
        return properties
    img_dir = IMAGES_DIR / str(dataset_id)
    img_dir.mkdir(parents=True, exist_ok=True)
    saved_urls = []
    for i, img in enumerate(images):
        if isinstance(img, str) and img.startswith("data:image"):
            # Decode the data URI (DOTALL: base64 payloads may contain newlines)
            match = re.match(r"data:image/(\w+);base64,(.+)", img, re.DOTALL)
            if match:
                ext = match.group(1)
                if ext == "jpeg":
                    ext = "jpg"
                b64_data = match.group(2)
                try:
                    raw = base64.b64decode(b64_data)
                    filename = f"{feature_index}_{i}.{ext}"
                    filepath = img_dir / filename
                    filepath.write_bytes(raw)
                    # Fixed: the URL must reference the file actually written
                    saved_urls.append(f"/api/images/{dataset_id}/{filename}")
                except Exception:
                    # Best-effort: skip images that fail to decode/save
                    continue
        elif isinstance(img, str) and img.startswith("/api/images/"):
            # Already a server-side URL
            saved_urls.append(img)
        elif isinstance(img, str) and img.startswith("http"):
            # External URL: keep as-is
            saved_urls.append(img)
    properties["_images"] = saved_urls
    return properties

View File

@@ -0,0 +1,62 @@
from fastapi import APIRouter, Depends
from pydantic import BaseModel
from typing import Optional
from sqlmodel import Session
from ..database import get_session
from ..models import MapSettings
router = APIRouter(prefix="/settings", tags=["settings"])
class MapSettingsUpdate(BaseModel):
    """Partial update payload for the map view; None means 'leave unchanged'."""
    center_lng: Optional[float] = None
    center_lat: Optional[float] = None
    zoom: Optional[float] = None
    base_layer: Optional[str] = None
@router.get("/map")
def get_map_settings(session: Session = Depends(get_session)):
    """Return the persisted map view, or built-in defaults when none is saved."""
    stored = session.get(MapSettings, 1)
    if stored is None:
        # No row yet: same defaults as the MapSettings model
        return {
            "center_lng": 2.35,
            "center_lat": 48.85,
            "zoom": 5.0,
            "base_layer": "vector",
        }
    return {
        "center_lng": stored.center_lng,
        "center_lat": stored.center_lat,
        "zoom": stored.zoom,
        "base_layer": stored.base_layer,
    }
@router.put("/map")
def save_map_settings(
    data: MapSettingsUpdate,
    session: Session = Depends(get_session),
):
    """Persist the map view; only the fields present in the payload change."""
    # Single-row table: the settings always live at id=1
    settings = session.get(MapSettings, 1)
    if not settings:
        settings = MapSettings(id=1)
    if data.center_lng is not None:
        settings.center_lng = data.center_lng
    if data.center_lat is not None:
        settings.center_lat = data.center_lat
    if data.zoom is not None:
        settings.zoom = data.zoom
    if data.base_layer is not None:
        settings.base_layer = data.base_layer
    session.add(settings)
    session.commit()
    session.refresh(settings)
    return {
        "center_lng": settings.center_lng,
        "center_lat": settings.center_lat,
        "zoom": settings.zoom,
        "base_layer": settings.base_layer,
    }