184 lines
5.5 KiB
Python
184 lines
5.5 KiB
Python
import json
import os
import shutil
import subprocess

from fastapi import APIRouter, HTTPException
from fastapi.responses import JSONResponse

from config import EPT_DIR, UPLOADS_DIR, ENTWINE_PATH, DATA_DIR
from services.converter import ENTWINE_AVAILABLE, ENTWINE_PATH, run_entwine
from services.manifest import read_manifest
|
|
|
|
router = APIRouter()
|
|
|
|
@router.get("/backend-config")
def backend_config():
    """Return the backend configuration: tool availability and free disk space.

    Returns:
        dict with entwine/pdal availability, the entwine path, and the free
        space (GiB) on the volume holding DATA_DIR ("?" if it can't be read).
    """
    try:
        usage = shutil.disk_usage(DATA_DIR)
        disk_free_gb = round(usage.free / (1024**3), 2)
    except OSError:
        # DATA_DIR may not exist yet (fresh install) — report unknown
        # instead of failing the whole config endpoint.
        disk_free_gb = "?"

    return {
        "entwine_available": ENTWINE_AVAILABLE,
        "entwine_path": ENTWINE_PATH,
        "pdal_available": shutil.which("pdal") is not None,
        "disk_free_gb": disk_free_gb,
    }
|
|
|
|
|
|
@router.get("/debug/{pc_id}")
def debug(pc_id: str):
    """Diagnostic view of one point cloud: manifest, file inventory, sizes.

    Raises:
        HTTPException: 404 when no output directory exists for pc_id.
    """
    target = EPT_DIR / pc_id
    if not target.exists():
        raise HTTPException(status_code=404, detail=f"ID {pc_id} non trouvé")

    manifest = read_manifest(target)

    # Inventory every regular file under the output directory.
    inventory = [
        (p.relative_to(target), p.stat().st_size)
        for p in target.rglob("*")
        if p.is_file()
    ]
    total_size = sum(size for _, size in inventory)
    files = [
        {"path": str(rel_path), "size_mb": round(size / (1024 * 1024), 2)}
        for rel_path, size in inventory
    ]

    # The manifest's entry file path is relative to EPT_DIR, not to target.
    entry_file = manifest.get("entry_file")
    entry_exists = bool(entry_file) and (EPT_DIR / entry_file).exists()

    return {
        "pc_id": pc_id,
        "exists": True,
        "manifest": manifest,
        "entry_exists": entry_exists,
        "stats": {
            "total_files": len(files),
            "total_size_mb": round(total_size / (1024 * 1024), 2),
        },
        # Only the 20 largest files, biggest first.
        "files": sorted(files, key=lambda f: f["size_mb"], reverse=True)[:20],
        "entwine_available": ENTWINE_AVAILABLE,
        "entwine_path": ENTWINE_PATH,
    }
|
|
|
|
|
|
@router.delete("/delete/{pc_id}")
def delete_pointcloud(pc_id: str):
    """Delete a point cloud: its EPT output directory and any uploaded source.

    Raises:
        HTTPException: 404 when pc_id is unknown, 500 on filesystem errors.
    """
    target = EPT_DIR / pc_id
    if not target.exists():
        raise HTTPException(status_code=404, detail=f"ID {pc_id} non trouvé")

    try:
        # Remove the originally uploaded file, whatever its extension was.
        for suffix in (".las", ".laz", ".ply", ".xyz", ".pts"):
            candidate = UPLOADS_DIR / f"{pc_id}{suffix}"
            if candidate.exists():
                candidate.unlink()

        shutil.rmtree(target)
        return {"ok": True, "message": f"Nuage {pc_id} supprimé"}
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Erreur suppression : {str(e)}")
|
|
|
|
|
|
@router.post("/crop/{pc_id}")
def crop_pointcloud(pc_id: str, box: dict):
    """
    Crop the point cloud with PDAL, then rebuild its EPT index in place.

    Args:
        pc_id: ID of the point cloud to crop.
        box: dict with the 3D box coordinates
             {"minX", "minY", "minZ", "maxX", "maxY", "maxZ"}
             (missing keys default to 0).

    Raises:
        HTTPException: 404 for an unknown id, 400 for a bad manifest,
            500 when PDAL is unavailable or the crop/conversion fails.
    """
    out_dir = EPT_DIR / pc_id
    if not out_dir.exists():
        raise HTTPException(status_code=404, detail=f"ID {pc_id} non trouvé")

    manifest = read_manifest(out_dir)
    if not manifest or not manifest.get("ept_dir"):
        raise HTTPException(status_code=400, detail="Manifeste invalide")

    ept_dir = EPT_DIR / manifest["ept_dir"]

    # Locate PDAL portably (same check as backend_config) instead of
    # shelling out to the Unix-only `which` binary.
    pdal_path = shutil.which("pdal")
    if not pdal_path:
        raise HTTPException(500, "PDAL non disponible")

    # filters.crop takes a "bounds" string: ([xmin,xmax],[ymin,ymax],[zmin,zmax]).
    bounds = (
        f"([{box.get('minX', 0)}, {box.get('maxX', 0)}], "
        f"[{box.get('minY', 0)}, {box.get('maxY', 0)}], "
        f"[{box.get('minZ', 0)}, {box.get('maxZ', 0)}])"
    )

    # PDAL pipeline: read the EPT index, crop to the box, write a plain LAS.
    # The source is an EPT dataset, so the reader is readers.ept (not
    # readers.las), and the output is a .las file, so the writer is
    # writers.las (not writers.ept).
    pipeline = {
        "pipeline": [
            {
                "type": "readers.ept",
                "filename": str(ept_dir / "ept.json"),
            },
            {
                "type": "filters.crop",
                "bounds": bounds,
            },
            {
                "type": "writers.las",
                "filename": str(out_dir / "cropped.las"),
            },
        ]
    }

    try:
        # `pdal pipeline --stdin` reads the JSON pipeline from stdin; the
        # dict must be serialized — subprocess input must be str/bytes.
        result = subprocess.run(
            [pdal_path, "pipeline", "--stdin"],
            input=json.dumps(pipeline),
            capture_output=True,
            text=True,
            timeout=7200,
            env=os.environ.copy(),
        )

        if result.returncode != 0:
            raise HTTPException(
                500,
                f"PDAL crop failed (code {result.returncode}):\n{result.stderr}"
            )

        cropped_las = out_dir / "cropped.las"
        if not cropped_las.exists():
            raise HTTPException(500, "Fichier LAS croppé non généré")

        # The cropped cloud replaces the original: drop the uploaded source.
        for ext in [".las", ".laz", ".ply", ".xyz", ".pts"]:
            original = UPLOADS_DIR / f"{pc_id}{ext}"
            if original.exists():
                original.unlink()

        # Rebuild the EPT index from the cropped LAS.
        run_entwine(cropped_las, out_dir)

        return {
            "ok": True,
            "id": pc_id,  # Same ID: the cloud was updated in place.
            "size_mb": round(cropped_las.stat().st_size / (1024 * 1024), 2),
            "conversion_time_seconds": 0,
        }

    except HTTPException:
        # Re-raise our own HTTP errors untouched; the generic handler below
        # would otherwise rewrap them and mangle status/detail.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Erreur crop : {str(e)}")
|