Upload files to "backend/routes"
This commit is contained in:
commit
7eb31f4834
3 changed files with 284 additions and 0 deletions
184
backend/routes/admin.py
Normal file
184
backend/routes/admin.py
Normal file
|
|
@ -0,0 +1,184 @@
|
||||||
|
import json
import os
import shutil
import subprocess
import time

from fastapi import APIRouter, HTTPException
from fastapi.responses import JSONResponse

from config import EPT_DIR, UPLOADS_DIR, ENTWINE_PATH, DATA_DIR
from services.manifest import read_manifest
from services.converter import ENTWINE_AVAILABLE, ENTWINE_PATH, run_entwine
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
@router.get("/backend-config")
def backend_config():
    """Return the backend configuration: tool availability and free disk space."""
    # Fixed: removed a redundant function-local `import os` (os is imported at
    # module level and unused here) and narrowed the bare `except:` to OSError.
    try:
        usage = shutil.disk_usage(DATA_DIR)
        disk_free_gb = round(usage.free / (1024**3), 2)
    except OSError:
        # DATA_DIR may be missing/inaccessible (fresh install); report unknown
        # rather than failing the whole endpoint.
        disk_free_gb = "?"

    return {
        "entwine_available": ENTWINE_AVAILABLE,
        "entwine_path": ENTWINE_PATH,
        "pdal_available": shutil.which("pdal") is not None,
        "disk_free_gb": disk_free_gb,
    }
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/debug/{pc_id}")
def debug(pc_id: str):
    """Diagnostic view of a converted point cloud: manifest, file inventory, sizes."""
    out_dir = EPT_DIR / pc_id
    if not out_dir.exists():
        raise HTTPException(status_code=404, detail=f"ID {pc_id} non trouvé")

    manifest = read_manifest(out_dir)

    # Inventory every regular file under the output directory.
    entries = []
    size_sum = 0
    for path in (p for p in out_dir.rglob("*") if p.is_file()):
        nbytes = path.stat().st_size
        size_sum += nbytes
        entries.append({
            "path": str(path.relative_to(out_dir)),
            "size_mb": round(nbytes / (1024 * 1024), 2),
        })

    # Manifest's entry file, if declared, is resolved relative to EPT_DIR.
    entry_file = manifest.get("entry_file")
    entry_exists = bool(entry_file) and (EPT_DIR / entry_file).exists()

    return {
        "pc_id": pc_id,
        "exists": True,
        "manifest": manifest,
        "entry_exists": entry_exists,
        "stats": {
            "total_files": len(entries),
            "total_size_mb": round(size_sum / (1024 * 1024), 2),
        },
        # Only the 20 largest files, biggest first.
        "files": sorted(entries, key=lambda e: e["size_mb"], reverse=True)[:20],
        "entwine_available": ENTWINE_AVAILABLE,
        "entwine_path": ENTWINE_PATH,
    }
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/delete/{pc_id}")
def delete_pointcloud(pc_id: str):
    """Delete a point cloud: any original upload variant plus its EPT output dir."""
    out_dir = EPT_DIR / pc_id
    if not out_dir.exists():
        raise HTTPException(status_code=404, detail=f"ID {pc_id} non trouvé")

    try:
        # Remove whichever upload extension variant(s) exist for this ID.
        candidates = (
            UPLOADS_DIR / f"{pc_id}{ext}"
            for ext in (".las", ".laz", ".ply", ".xyz", ".pts")
        )
        for candidate in candidates:
            if candidate.exists():
                candidate.unlink()

        shutil.rmtree(out_dir)
        return {"ok": True, "message": f"Nuage {pc_id} supprimé"}
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Erreur suppression : {str(e)}")
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/crop/{pc_id}")
def crop_pointcloud(pc_id: str, box: dict):
    """
    Crop the point cloud with PDAL and rebuild its EPT output in place.

    Args:
        pc_id: ID of the point cloud to crop
        box: dict with the 3D box coordinates
            {"minX", "minY", "minZ", "maxX", "maxY", "maxZ"}

    Raises:
        HTTPException: 404 for unknown ID, 400 for an invalid manifest,
            500 when PDAL is unavailable or the crop pipeline fails.
    """
    out_dir = EPT_DIR / pc_id
    if not out_dir.exists():
        raise HTTPException(status_code=404, detail=f"ID {pc_id} non trouvé")

    manifest = read_manifest(out_dir)
    if not manifest or not manifest.get("ept_dir"):
        raise HTTPException(status_code=400, detail="Manifeste invalide")

    ept_dir = EPT_DIR / manifest["ept_dir"]

    # Fixed: use shutil.which instead of shelling out to `which` (portable,
    # no subprocess needed just to locate the binary).
    pdal_path = shutil.which("pdal")
    if not pdal_path:
        raise HTTPException(500, "PDAL non disponible")

    cropped_las = out_dir / "cropped.las"

    # Fixed pipeline stages: the EPT dataset is read with readers.ept (the
    # original used readers.las on ept.json) and the cropped result is written
    # as LAS with writers.las (the original used writers.ept on a .las path).
    # filters.crop takes a `bounds` string, not a `crop_box` list.
    pipeline = {
        "pipeline": [
            {
                "type": "readers.ept",
                "filename": str(ept_dir / "ept.json"),
            },
            {
                "type": "filters.crop",
                "bounds": (
                    f"([{box.get('minX', 0)},{box.get('maxX', 0)}],"
                    f"[{box.get('minY', 0)},{box.get('maxY', 0)}],"
                    f"[{box.get('minZ', 0)},{box.get('maxZ', 0)}])"
                ),
            },
            {
                "type": "writers.las",
                "filename": str(cropped_las),
            },
        ]
    }

    try:
        started = time.time()
        # Fixed invocation: `pdal pipeline --stdin` reads a pipeline JSON from
        # stdin; the dict must be serialized (passing the dict raised TypeError).
        result = subprocess.run(
            [pdal_path, "pipeline", "--stdin"],
            input=json.dumps(pipeline),
            capture_output=True,
            text=True,
            timeout=7200,
            env=os.environ.copy(),
        )

        if result.returncode != 0:
            raise HTTPException(
                500,
                f"PDAL crop failed (code {result.returncode}):\n{result.stderr}"
            )

        if not cropped_las.exists():
            raise HTTPException(500, "Fichier LAS croppé non généré")

        # Drop the original upload: the cropped LAS is now the source of truth.
        for ext in [".las", ".laz", ".ply", ".xyz", ".pts"]:
            original = UPLOADS_DIR / f"{pc_id}{ext}"
            if original.exists():
                original.unlink()

        # Rebuild the EPT octree from the cropped file.
        run_entwine(cropped_las, out_dir)

        return {
            "ok": True,
            "id": pc_id,  # same ID; the cloud was updated in place
            "size_mb": round(cropped_las.stat().st_size / (1024 * 1024), 2),
            # Fixed: report the actual elapsed time (was hardcoded to 0).
            "conversion_time_seconds": round(time.time() - started, 2),
        }
    except HTTPException:
        # Fixed: don't rewrap deliberate HTTP errors raised above as a
        # generic 500 with a different message.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Erreur crop : {str(e)}")
|
||||||
47
backend/routes/upload.py
Normal file
47
backend/routes/upload.py
Normal file
|
|
@ -0,0 +1,47 @@
|
||||||
|
from fastapi import APIRouter, UploadFile, File, HTTPException
|
||||||
|
from fastapi.responses import JSONResponse
|
||||||
|
import uuid, shutil, time
|
||||||
|
from pathlib import Path
|
||||||
|
from config import UPLOADS_DIR, EPT_DIR, SUPPORTED_FORMATS
|
||||||
|
from services.converter import run_entwine, ENTWINE_AVAILABLE
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
@router.post("/upload")
async def upload(file: UploadFile = File(...)):
    """Receive a point-cloud file, store it, and convert it to EPT with entwine."""
    suffix = Path(file.filename).suffix.lower()
    if suffix not in SUPPORTED_FORMATS:
        raise HTTPException(400, f"Format non supporté: {suffix}")
    if not ENTWINE_AVAILABLE:
        raise HTTPException(500, "entwine non disponible sur ce serveur")

    # Short random ID shared by the stored upload and its EPT output directory.
    pc_id = str(uuid.uuid4())[:8]
    upload_path = UPLOADS_DIR / f"{pc_id}{suffix}"
    out_dir = EPT_DIR / pc_id

    # Stream the upload to disk in 1 MiB chunks while counting bytes.
    file_size = 0
    with open(upload_path, "wb") as dest:
        while True:
            chunk = await file.read(1024 * 1024)
            if not chunk:
                break
            dest.write(chunk)
            file_size += len(chunk)

    if file_size == 0:
        upload_path.unlink()
        raise HTTPException(400, "Fichier vide")

    # A reused ID must not inherit stale conversion output.
    if out_dir.exists():
        shutil.rmtree(out_dir, ignore_errors=True)
    out_dir.mkdir(parents=True, exist_ok=True)

    start = time.time()
    result = run_entwine(upload_path, out_dir)

    return JSONResponse({
        "id": pc_id,
        "filename": file.filename,
        "size_mb": round(file_size / (1024 * 1024), 2),
        "viewer_path": f"/viewer/{pc_id}",
        "embed_path": f"/viewer-embed/{pc_id}",
        "ept_dir": result.get("ept_dir"),
        "conversion_time_seconds": round(time.time() - start, 2),
    })
|
||||||
53
backend/routes/viewer.py
Normal file
53
backend/routes/viewer.py
Normal file
|
|
@ -0,0 +1,53 @@
|
||||||
|
from fastapi import APIRouter, HTTPException, Query, Request
|
||||||
|
from fastapi.responses import JSONResponse, HTMLResponse
|
||||||
|
from config import EPT_DIR, POTREE_URL
|
||||||
|
import config
|
||||||
|
from services.manifest import read_manifest
|
||||||
|
from services.html_generator import generate_viewer_html
|
||||||
|
import shutil
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
@router.get("/viewer/list", response_class=HTMLResponse)
def list_pointclouds(request: Request):
    """List the available point clouds, newest first, rendered as an HTML partial."""
    # Fixed: removed a dead `from fastapi import Request` inside the function —
    # Request is already imported at module level and the local import was unused.
    pointclouds = []

    # Newest directories first (directory st_ctime).
    for item in sorted(EPT_DIR.iterdir(), key=lambda x: x.stat().st_ctime, reverse=True):
        if item.is_dir():
            total_size = 0
            file_count = 0
            for f in item.rglob("*"):
                if f.is_file():
                    total_size += f.stat().st_size
                    file_count += 1

            # Skip empty directories (failed or in-progress conversions).
            if file_count > 0:
                pointclouds.append({
                    "id": item.name,
                    "size_mb": round(total_size / (1024 * 1024), 2),
                    "file_count": file_count,
                    "created": item.stat().st_ctime,
                })

    return request.app.state.templates.TemplateResponse(
        "partials/cloud_list.html",
        {"request": request, "pointclouds": pointclouds},
    )
|
||||||
|
|
||||||
|
@router.get("/viewer/{pc_id}")
def viewer(pc_id: str, potree_url: str = Query(default=POTREE_URL, description="URL du serveur Potree")):
    """Serve the full-page viewer for a converted point cloud."""
    out_dir = EPT_DIR / pc_id
    if not out_dir.exists():
        raise HTTPException(404, f"ID {pc_id} non trouvé")
    manifest = read_manifest(out_dir)
    html = generate_viewer_html(pc_id, manifest.get("ept_dir"), embed=False, potree_url=potree_url)
    return HTMLResponse(html)
|
||||||
|
|
||||||
|
@router.get("/viewer-embed/{pc_id}")
def viewer_embed(pc_id: str, potree_url: str = Query(default=POTREE_URL, description="URL du serveur Potree")):
    """Serve the embeddable (iframe) viewer for a converted point cloud."""
    out_dir = EPT_DIR / pc_id
    if not out_dir.exists():
        raise HTTPException(404, f"ID {pc_id} non trouvé")
    manifest = read_manifest(out_dir)
    html = generate_viewer_html(pc_id, manifest.get("ept_dir"), embed=True, potree_url=potree_url)
    return HTMLResponse(html)
|
||||||
Loading…
Add table
Add a link
Reference in a new issue