"""
|
||
API routes for Brace Generator.
|
||
|
||
Note: S3 operations are handled by the Lambda function.
|
||
This server only handles ML inference and returns local file paths.
|
||
"""
|
||
import torch
|
||
from fastapi import APIRouter, HTTPException, UploadFile, File, Form, Request
|
||
from fastapi.responses import FileResponse
|
||
from typing import Optional
|
||
import json
|
||
from pathlib import Path
|
||
|
||
from .schemas import (
|
||
AnalysisResult, HealthResponse, ExperimentType, BraceConfigRequest
|
||
)
|
||
from .config import config
|
||
|
||
|
||
router = APIRouter()
|
||
|
||
|
||
@router.get("/", summary="Root endpoint")
|
||
async def root():
|
||
"""Welcome endpoint."""
|
||
return {
|
||
"service": "Brace Generator API",
|
||
"version": "1.0.0",
|
||
"docs": "/docs",
|
||
"health": "/health"
|
||
}
|
||
|
||
|
||
@router.get("/health", response_model=HealthResponse, summary="Health check")
|
||
async def health_check():
|
||
"""Check server health and GPU status."""
|
||
cuda_available = torch.cuda.is_available()
|
||
|
||
gpu_name = None
|
||
gpu_memory_mb = None
|
||
if cuda_available:
|
||
gpu_name = torch.cuda.get_device_name(0)
|
||
gpu_memory_mb = int(torch.cuda.get_device_properties(0).total_memory / (1024**2))
|
||
|
||
return HealthResponse(
|
||
status="healthy",
|
||
device=config.get_device(),
|
||
cuda_available=cuda_available,
|
||
model_loaded=True,
|
||
gpu_name=gpu_name,
|
||
gpu_memory_mb=gpu_memory_mb
|
||
)
|
||
|
||
|
||
@router.post("/analyze/upload", response_model=AnalysisResult, summary="Analyze uploaded X-ray")
|
||
async def analyze_upload(
|
||
req: Request,
|
||
file: UploadFile = File(..., description="X-ray image file"),
|
||
case_id: Optional[str] = Form(None, description="Case ID"),
|
||
experiment: str = Form("experiment_3", description="Experiment type"),
|
||
config_json: Optional[str] = Form(None, description="Brace config as JSON"),
|
||
landmarks_json: Optional[str] = Form(None, description="Pre-computed landmarks with manual edits")
|
||
):
|
||
"""
|
||
Analyze an uploaded X-ray image and generate brace.
|
||
|
||
This endpoint accepts multipart/form-data for direct file upload.
|
||
Returns analysis results with local file paths that can be downloaded
|
||
via the /download endpoint.
|
||
|
||
If landmarks_json is provided, it will use those landmarks (with manual edits)
|
||
instead of re-running automatic detection. This allows manual corrections
|
||
to be incorporated into the brace generation.
|
||
|
||
The Lambda function is responsible for:
|
||
1. Downloading the X-ray from S3
|
||
2. Calling this endpoint
|
||
3. Downloading output files via /download
|
||
4. Uploading files to S3
|
||
"""
|
||
# Validate file
|
||
if not file.filename:
|
||
raise HTTPException(status_code=400, detail="No file provided")
|
||
|
||
# Check file size
|
||
contents = await file.read()
|
||
if len(contents) > config.MAX_IMAGE_SIZE_MB * 1024 * 1024:
|
||
raise HTTPException(
|
||
status_code=400,
|
||
detail=f"File too large. Maximum size is {config.MAX_IMAGE_SIZE_MB}MB"
|
||
)
|
||
|
||
# Parse config if provided
|
||
brace_config = None
|
||
if config_json:
|
||
try:
|
||
config_data = json.loads(config_json)
|
||
brace_config = BraceConfigRequest(**config_data)
|
||
except (json.JSONDecodeError, ValueError) as e:
|
||
raise HTTPException(status_code=400, detail=f"Invalid config: {e}")
|
||
|
||
# Parse landmarks if provided (manual edits)
|
||
landmarks_data = None
|
||
if landmarks_json:
|
||
try:
|
||
landmarks_data = json.loads(landmarks_json)
|
||
except json.JSONDecodeError as e:
|
||
raise HTTPException(status_code=400, detail=f"Invalid landmarks JSON: {e}")
|
||
|
||
# Parse experiment type
|
||
try:
|
||
exp_type = ExperimentType(experiment)
|
||
except ValueError:
|
||
exp_type = ExperimentType.EXPERIMENT_3
|
||
|
||
service = req.app.state.brace_service
|
||
|
||
try:
|
||
result = await service.analyze_from_bytes(
|
||
image_data=contents,
|
||
filename=file.filename,
|
||
experiment=exp_type,
|
||
case_id=case_id,
|
||
brace_config=brace_config,
|
||
landmarks_data=landmarks_data # Pass pre-computed landmarks
|
||
)
|
||
return result
|
||
except ValueError as e:
|
||
raise HTTPException(status_code=400, detail=str(e))
|
||
except Exception as e:
|
||
raise HTTPException(status_code=500, detail=str(e))
|
||
|
||
|
||
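# Illustrative client call for /analyze/upload (not part of the API itself).
# It assumes an httpx client, a server on localhost:8000 with this router
# mounted at the root path, and a local file named "xray.jpg".
#
#   import httpx
#
#   with open("xray.jpg", "rb") as fh:
#       resp = httpx.post(
#           "http://localhost:8000/analyze/upload",
#           files={"file": ("xray.jpg", fh, "image/jpeg")},
#           data={"experiment": "experiment_3"},
#           timeout=300.0,
#       )
#   resp.raise_for_status()
#   analysis = resp.json()  # AnalysisResult as JSON

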
@router.get("/download/{case_id}/{filename}", summary="Download output file")
|
||
async def download_file(case_id: str, filename: str):
|
||
"""
|
||
Download a generated output file.
|
||
|
||
This endpoint is called by the Lambda function to retrieve
|
||
generated files (STL, PLY, PNG, JSON) for upload to S3.
|
||
"""
|
||
file_path = config.TEMP_DIR / case_id / filename
|
||
|
||
if not file_path.exists():
|
||
raise HTTPException(status_code=404, detail=f"File not found: {filename}")
|
||
|
||
# Determine media type
|
||
ext = file_path.suffix.lower()
|
||
media_types = {
|
||
".stl": "application/octet-stream",
|
||
".ply": "application/octet-stream",
|
||
".obj": "application/octet-stream",
|
||
".glb": "model/gltf-binary",
|
||
".gltf": "model/gltf+json",
|
||
".png": "image/png",
|
||
".jpg": "image/jpeg",
|
||
".jpeg": "image/jpeg",
|
||
".json": "application/json",
|
||
}
|
||
media_type = media_types.get(ext, "application/octet-stream")
|
||
|
||
return FileResponse(
|
||
path=str(file_path),
|
||
filename=filename,
|
||
media_type=media_type
|
||
)
|
||
|
||
|
||
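# Illustrative download of a generated file (step 3 of the Lambda workflow
# described in analyze_upload). Host, case_id, and filename are assumptions.
#
#   import httpx
#
#   url = "http://localhost:8000/download/case_abc123/brace.stl"
#   with httpx.stream("GET", url) as resp:
#       resp.raise_for_status()
#       with open("brace.stl", "wb") as out:
#           for chunk in resp.iter_bytes():
#               out.write(chunk)

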
@router.post("/extract-body-measurements", summary="Extract body measurements from 3D scan")
|
||
async def extract_body_measurements(
|
||
file: UploadFile = File(..., description="3D body scan file (STL/OBJ/PLY)")
|
||
):
|
||
"""
|
||
Extract body measurements from a 3D body scan.
|
||
|
||
Returns measurements needed for brace fitting:
|
||
- Total height
|
||
- Shoulder, chest, waist, hip widths and depths
|
||
- Circumferences
|
||
- Brace coverage region
|
||
"""
|
||
import tempfile
|
||
from pathlib import Path
|
||
|
||
try:
|
||
from server_DEV.body_integration import extract_measurements_from_scan
|
||
except ImportError as e:
|
||
raise HTTPException(status_code=500, detail=f"Body integration module not available: {e}")
|
||
|
||
# Validate file type
|
||
allowed_extensions = ['.stl', '.obj', '.ply', '.glb', '.gltf']
|
||
ext = Path(file.filename).suffix.lower() if file.filename else '.stl'
|
||
if ext not in allowed_extensions:
|
||
raise HTTPException(
|
||
status_code=400,
|
||
detail=f"Invalid file type. Allowed: {', '.join(allowed_extensions)}"
|
||
)
|
||
|
||
# Save to temp file
|
||
contents = await file.read()
|
||
with tempfile.NamedTemporaryFile(suffix=ext, delete=False) as f:
|
||
f.write(contents)
|
||
temp_path = f.name
|
||
|
||
try:
|
||
measurements = extract_measurements_from_scan(temp_path)
|
||
return measurements
|
||
except Exception as e:
|
||
raise HTTPException(status_code=500, detail=str(e))
|
||
finally:
|
||
# Cleanup
|
||
Path(temp_path).unlink(missing_ok=True)
|
||
|
||
|
||
@router.post("/generate-with-body", summary="Generate brace with body scan fitting")
|
||
async def generate_with_body_scan(
|
||
req: Request,
|
||
xray_file: UploadFile = File(..., description="X-ray image"),
|
||
body_scan_file: UploadFile = File(..., description="3D body scan (STL/OBJ/PLY)"),
|
||
case_id: Optional[str] = Form(None, description="Case ID"),
|
||
landmarks_json: Optional[str] = Form(None, description="Pre-computed landmarks"),
|
||
clearance_mm: float = Form(8.0, description="Shell clearance in mm"),
|
||
):
|
||
"""
|
||
Generate a patient-specific brace using X-ray analysis and 3D body scan.
|
||
|
||
This endpoint:
|
||
1. Analyzes X-ray to detect spine landmarks and compute Cobb angles
|
||
2. Classifies curve type using Rigo-Cheneau system
|
||
3. Fits a shell template to the 3D body scan
|
||
4. Returns STL, GLB, and visualization files
|
||
"""
|
||
import tempfile
|
||
import uuid
|
||
from pathlib import Path
|
||
|
||
try:
|
||
from server_DEV.body_integration import generate_fitted_brace, extract_measurements_from_scan
|
||
except ImportError as e:
|
||
raise HTTPException(status_code=500, detail=f"Body integration module not available: {e}")
|
||
|
||
# Generate case ID if not provided
|
||
case_id = case_id or f"case_{uuid.uuid4().hex[:8]}"
|
||
|
||
# Save files to temp directory
|
||
temp_dir = config.TEMP_DIR / case_id
|
||
temp_dir.mkdir(parents=True, exist_ok=True)
|
||
|
||
# Save X-ray
|
||
xray_contents = await xray_file.read()
|
||
xray_ext = Path(xray_file.filename).suffix if xray_file.filename else '.jpg'
|
||
xray_path = temp_dir / f"xray{xray_ext}"
|
||
xray_path.write_bytes(xray_contents)
|
||
|
||
# Save body scan
|
||
body_contents = await body_scan_file.read()
|
||
body_ext = Path(body_scan_file.filename).suffix if body_scan_file.filename else '.stl'
|
||
body_scan_path = temp_dir / f"body_scan{body_ext}"
|
||
body_scan_path.write_bytes(body_contents)
|
||
|
||
try:
|
||
# Parse landmarks if provided
|
||
landmarks_data = None
|
||
if landmarks_json:
|
||
import json
|
||
landmarks_data = json.loads(landmarks_json)
|
||
|
||
# Step 1: Analyze X-ray to get Rigo classification (this generates the brace)
|
||
service = req.app.state.brace_service
|
||
|
||
xray_result = await service.analyze_from_bytes(
|
||
image_data=xray_contents,
|
||
filename=xray_file.filename,
|
||
experiment=ExperimentType.EXPERIMENT_3,
|
||
case_id=case_id,
|
||
landmarks_data=landmarks_data
|
||
)
|
||
|
||
rigo_type = xray_result.rigo_classification.type if xray_result.rigo_classification else "A1"
|
||
|
||
# Step 2: Try to extract body measurements (optional - EXPERIMENT_10 may not be deployed)
|
||
body_measurements = None
|
||
fitting_result = None
|
||
body_scan_error = None
|
||
|
||
try:
|
||
body_measurements = extract_measurements_from_scan(str(body_scan_path))
|
||
|
||
# Step 3: Generate fitted brace (only if measurements worked)
|
||
fitting_result = generate_fitted_brace(
|
||
body_scan_path=str(body_scan_path),
|
||
rigo_type=rigo_type,
|
||
output_dir=str(temp_dir),
|
||
case_id=case_id,
|
||
clearance_mm=clearance_mm
|
||
)
|
||
except Exception as body_err:
|
||
print(f"Warning: Body scan processing failed, using X-ray only: {body_err}")
|
||
body_scan_error = str(body_err)
|
||
|
||
# If body fitting worked, return full result
|
||
if fitting_result:
|
||
return {
|
||
"case_id": case_id,
|
||
"experiment": "experiment_10",
|
||
"model_used": xray_result.model_used,
|
||
"vertebrae_detected": xray_result.vertebrae_detected,
|
||
"cobb_angles": {
|
||
"PT": xray_result.cobb_angles.PT,
|
||
"MT": xray_result.cobb_angles.MT,
|
||
"TL": xray_result.cobb_angles.TL,
|
||
},
|
||
"curve_type": xray_result.curve_type,
|
||
"rigo_classification": {
|
||
"type": rigo_type,
|
||
"description": xray_result.rigo_classification.description if xray_result.rigo_classification else ""
|
||
},
|
||
"body_scan": {
|
||
"measurements": body_measurements,
|
||
},
|
||
"brace_fitting": fitting_result,
|
||
"outputs": {
|
||
"shell_stl": fitting_result["outputs"]["shell_stl"],
|
||
"shell_glb": fitting_result["outputs"]["shell_glb"],
|
||
"combined_stl": fitting_result["outputs"]["combined_stl"],
|
||
"visualization": fitting_result["outputs"].get("visualization"),
|
||
"feedback_json": fitting_result["outputs"]["feedback_json"],
|
||
"xray_visualization": str(xray_result.outputs.get("visualization", "")),
|
||
},
|
||
"mesh_vertices": fitting_result["mesh_stats"]["vertices"],
|
||
"mesh_faces": fitting_result["mesh_stats"]["faces"],
|
||
"processing_time_ms": xray_result.processing_time_ms,
|
||
}
|
||
|
||
# Fallback: return X-ray only result (body scan processing not available)
|
||
return {
|
||
"case_id": case_id,
|
||
"experiment": "experiment_3_fallback",
|
||
"model_used": xray_result.model_used,
|
||
"vertebrae_detected": xray_result.vertebrae_detected,
|
||
"cobb_angles": {
|
||
"PT": xray_result.cobb_angles.PT,
|
||
"MT": xray_result.cobb_angles.MT,
|
||
"TL": xray_result.cobb_angles.TL,
|
||
},
|
||
"curve_type": xray_result.curve_type,
|
||
"rigo_classification": {
|
||
"type": rigo_type,
|
||
"description": xray_result.rigo_classification.description if xray_result.rigo_classification else ""
|
||
},
|
||
"body_scan": {
|
||
"error": body_scan_error or "Body scan processing not available",
|
||
"fallback": "Using X-ray only brace generation"
|
||
},
|
||
"outputs": xray_result.outputs,
|
||
"mesh_vertices": xray_result.mesh_vertices,
|
||
"mesh_faces": xray_result.mesh_faces,
|
||
"processing_time_ms": xray_result.processing_time_ms,
|
||
}
|
||
|
||
except Exception as e:
|
||
import traceback
|
||
traceback.print_exc()
|
||
raise HTTPException(status_code=500, detail=str(e))
|
||
|
||
|
||
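# Illustrative client call for /generate-with-body (not part of the API).
# Assumes httpx, a server on localhost:8000, and local files "xray.jpg" and
# "body_scan.stl"; the endpoint falls back to X-ray-only output when body-scan
# processing is unavailable.
#
#   import httpx
#
#   with open("xray.jpg", "rb") as xray, open("body_scan.stl", "rb") as scan:
#       resp = httpx.post(
#           "http://localhost:8000/generate-with-body",
#           files={
#               "xray_file": ("xray.jpg", xray, "image/jpeg"),
#               "body_scan_file": ("body_scan.stl", scan, "application/octet-stream"),
#           },
#           data={"clearance_mm": "8.0"},
#           timeout=600.0,
#       )
#   resp.raise_for_status()
#   print(resp.json()["rigo_classification"])

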
@router.get("/experiments", summary="List available experiments")
|
||
async def list_experiments():
|
||
"""List available brace generation experiments."""
|
||
return {
|
||
"experiments": [
|
||
{
|
||
"id": "standard",
|
||
"name": "Standard Pipeline",
|
||
"description": "Original template-based brace generation using Rigo classification"
|
||
},
|
||
{
|
||
"id": "experiment_3",
|
||
"name": "Research-Based Adaptive",
|
||
"description": "Adaptive brace generation based on Guy et al. (2024) with patch-based deformation optimization"
|
||
},
|
||
{
|
||
"id": "experiment_10",
|
||
"name": "Patient-Specific Body Fitting",
|
||
"description": "X-ray analysis + 3D body scan for precise patient-specific brace fitting"
|
||
}
|
||
],
|
||
"default": "experiment_3"
|
||
}
|
||
|
||
|
||
@router.get("/models", summary="List available detection models")
|
||
async def list_models():
|
||
"""List available landmark detection models."""
|
||
return {
|
||
"models": [
|
||
{
|
||
"id": "scoliovis",
|
||
"name": "ScolioVis",
|
||
"description": "Keypoint R-CNN model for vertebrae detection",
|
||
"supports_gpu": True
|
||
},
|
||
{
|
||
"id": "vertebra-landmark",
|
||
"name": "Vertebra-Landmark-Detection",
|
||
"description": "SpineNet-based detection (alternative)",
|
||
"supports_gpu": True
|
||
}
|
||
],
|
||
"current": config.MODEL
|
||
}
|
||
|
||
|
||
# ============================================
|
||
# NEW ENDPOINTS FOR PIPELINE DEV
|
||
# ============================================
|
||
|
||
@router.post("/detect-landmarks", summary="Detect landmarks only (Stage 1)")
|
||
async def detect_landmarks(
|
||
req: Request,
|
||
file: UploadFile = File(..., description="X-ray image file"),
|
||
case_id: Optional[str] = Form(None, description="Case ID"),
|
||
):
|
||
"""
|
||
Detect vertebrae landmarks without generating a brace.
|
||
Returns landmarks, visualization, and vertebrae_structure for manual editing.
|
||
|
||
This is Stage 1 of the pipeline - just detection, no brace generation.
|
||
"""
|
||
if not file.filename:
|
||
raise HTTPException(status_code=400, detail="No file provided")
|
||
|
||
contents = await file.read()
|
||
if len(contents) > config.MAX_IMAGE_SIZE_MB * 1024 * 1024:
|
||
raise HTTPException(
|
||
status_code=400,
|
||
detail=f"File too large. Maximum size is {config.MAX_IMAGE_SIZE_MB}MB"
|
||
)
|
||
|
||
service = req.app.state.brace_service
|
||
|
||
try:
|
||
result = await service.detect_landmarks_only(
|
||
image_data=contents,
|
||
filename=file.filename,
|
||
case_id=case_id
|
||
)
|
||
return result
|
||
except ValueError as e:
|
||
raise HTTPException(status_code=400, detail=str(e))
|
||
except Exception as e:
|
||
raise HTTPException(status_code=500, detail=str(e))
|
||
|
||
|
||
@router.post("/recalculate", summary="Recalculate Cobb/Rigo from landmarks")
|
||
async def recalculate_analysis(req: Request):
|
||
"""
|
||
Recalculate Cobb angles and Rigo classification from provided landmarks.
|
||
|
||
Use this after manual landmark editing to get updated analysis.
|
||
|
||
Request body:
|
||
{
|
||
"case_id": "case-xxx",
|
||
"landmarks": { ... vertebrae_structure from detect-landmarks ... }
|
||
}
|
||
"""
|
||
body = await req.json()
|
||
case_id = body.get("case_id")
|
||
landmarks = body.get("landmarks")
|
||
|
||
if not landmarks:
|
||
raise HTTPException(status_code=400, detail="landmarks data required")
|
||
|
||
service = req.app.state.brace_service
|
||
|
||
try:
|
||
result = await service.recalculate_from_landmarks(
|
||
landmarks_data=landmarks,
|
||
case_id=case_id
|
||
)
|
||
return result
|
||
except ValueError as e:
|
||
raise HTTPException(status_code=400, detail=str(e))
|
||
except Exception as e:
|
||
raise HTTPException(status_code=500, detail=str(e))
|
||
|
||
|
||
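# Illustrative two-stage flow: /detect-landmarks, manual edits, /recalculate.
# The host, file name, and response keys used here are assumptions; the
# landmark payload is whatever detect-landmarks returns as "vertebrae_structure".
#
#   import httpx
#
#   base = "http://localhost:8000"
#   with open("xray.jpg", "rb") as fh:
#       detected = httpx.post(
#           f"{base}/detect-landmarks",
#           files={"file": ("xray.jpg", fh, "image/jpeg")},
#           timeout=120.0,
#       ).json()
#
#   landmarks = detected["vertebrae_structure"]  # edited by the user in the UI
#
#   updated = httpx.post(
#       f"{base}/recalculate",
#       json={"case_id": detected.get("case_id"), "landmarks": landmarks},
#       timeout=60.0,
#   ).json()

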
# =============================================================================
# GLB BRACE GENERATION WITH MARKERS
# =============================================================================

from .glb_generator import (
    generate_glb_brace,
    generate_both_brace_types,
    list_available_templates,
    calculate_pressure_zones,
    load_template_markers,
    AVAILABLE_RIGO_TYPES
)


@router.get("/templates", summary="List available brace templates")
|
||
async def list_templates():
|
||
"""
|
||
List all available brace templates (regular and vase types).
|
||
|
||
Returns which Rigo types have templates available.
|
||
"""
|
||
return {
|
||
"available_templates": list_available_templates(),
|
||
"rigo_types": AVAILABLE_RIGO_TYPES,
|
||
"template_types": ["regular", "vase"]
|
||
}
|
||
|
||
|
||
@router.get("/templates/{rigo_type}/markers", summary="Get template markers")
|
||
async def get_template_markers(
|
||
rigo_type: str,
|
||
template_type: str = "regular"
|
||
):
|
||
"""
|
||
Get marker positions for a specific template.
|
||
|
||
Args:
|
||
rigo_type: Rigo classification (A1, A2, A3, B1, B2, C1, C2, E1, E2)
|
||
template_type: "regular" or "vase"
|
||
|
||
Returns:
|
||
Marker positions and basis vectors
|
||
"""
|
||
if rigo_type not in AVAILABLE_RIGO_TYPES:
|
||
raise HTTPException(
|
||
status_code=400,
|
||
detail=f"Invalid rigo_type. Must be one of: {AVAILABLE_RIGO_TYPES}"
|
||
)
|
||
|
||
if template_type not in ["regular", "vase"]:
|
||
raise HTTPException(
|
||
status_code=400,
|
||
detail="template_type must be 'regular' or 'vase'"
|
||
)
|
||
|
||
try:
|
||
markers = load_template_markers(rigo_type, template_type)
|
||
return {
|
||
"rigo_type": rigo_type,
|
||
"template_type": template_type,
|
||
**markers
|
||
}
|
||
except FileNotFoundError as e:
|
||
raise HTTPException(status_code=404, detail=str(e))
|
||
|
||
|
||
@router.post("/generate-glb", summary="Generate GLB brace with markers")
|
||
async def generate_glb_endpoint(
|
||
req: Request,
|
||
rigo_type: str = Form(..., description="Rigo classification (A1-E2)"),
|
||
template_type: str = Form("regular", description="Template type: 'regular' or 'vase'"),
|
||
case_id: str = Form(..., description="Case identifier"),
|
||
cobb_pt: float = Form(0.0, description="Proximal Thoracic Cobb angle"),
|
||
cobb_mt: float = Form(0.0, description="Main Thoracic Cobb angle"),
|
||
cobb_tl: float = Form(0.0, description="Thoracolumbar Cobb angle"),
|
||
body_scan: Optional[UploadFile] = File(None, description="Optional 3D body scan STL")
|
||
):
|
||
"""
|
||
Generate a GLB brace with embedded markers.
|
||
|
||
This endpoint generates a brace file that includes marker positions
|
||
for later editing. Optionally fits to a body scan.
|
||
|
||
**Pressure Zones in Output:**
|
||
- LM_PAD_TH: Thoracic pad (pushes INWARD on curve convex side)
|
||
- LM_BAY_TH: Thoracic bay (creates SPACE on curve concave side)
|
||
- LM_PAD_LUM: Lumbar pad (pushes INWARD)
|
||
- LM_BAY_LUM: Lumbar bay (creates SPACE)
|
||
- LM_ANCHOR_HIP_L/R: Hip anchors (stabilize brace)
|
||
|
||
Returns:
|
||
GLB and STL file paths, marker positions, pressure zone info
|
||
"""
|
||
if rigo_type not in AVAILABLE_RIGO_TYPES:
|
||
raise HTTPException(
|
||
status_code=400,
|
||
detail=f"Invalid rigo_type. Must be one of: {AVAILABLE_RIGO_TYPES}"
|
||
)
|
||
|
||
if template_type not in ["regular", "vase"]:
|
||
raise HTTPException(
|
||
status_code=400,
|
||
detail="template_type must be 'regular' or 'vase'"
|
||
)
|
||
|
||
import tempfile
|
||
from pathlib import Path
|
||
|
||
output_dir = Path(tempfile.gettempdir()) / "brace_generator" / case_id
|
||
output_dir.mkdir(parents=True, exist_ok=True)
|
||
|
||
body_scan_path = None
|
||
|
||
# Save body scan if provided
|
||
if body_scan:
|
||
body_ext = Path(body_scan.filename).suffix if body_scan.filename else ".stl"
|
||
body_scan_path = str(output_dir / f"body_scan{body_ext}")
|
||
with open(body_scan_path, "wb") as f:
|
||
content = await body_scan.read()
|
||
f.write(content)
|
||
|
||
cobb_angles = {
|
||
"PT": cobb_pt,
|
||
"MT": cobb_mt,
|
||
"TL": cobb_tl
|
||
}
|
||
|
||
try:
|
||
result = generate_glb_brace(
|
||
rigo_type=rigo_type,
|
||
template_type=template_type,
|
||
output_dir=output_dir,
|
||
case_id=case_id,
|
||
cobb_angles=cobb_angles,
|
||
body_scan_path=body_scan_path,
|
||
clearance_mm=8.0
|
||
)
|
||
|
||
return {
|
||
"success": True,
|
||
"case_id": case_id,
|
||
"rigo_type": rigo_type,
|
||
"template_type": template_type,
|
||
"outputs": {
|
||
"glb": result.glb_path,
|
||
"stl": result.stl_path,
|
||
"json": result.json_path
|
||
},
|
||
"markers": result.markers,
|
||
"basis": result.basis,
|
||
"pressure_zones": result.pressure_zones,
|
||
"mesh_stats": result.mesh_stats,
|
||
"body_fitting": result.transform_applied
|
||
}
|
||
except FileNotFoundError as e:
|
||
raise HTTPException(status_code=404, detail=str(e))
|
||
except Exception as e:
|
||
raise HTTPException(status_code=500, detail=str(e))
|
||
|
||
|
||
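# Illustrative form POST to /generate-glb (host and values are assumptions).
# The Cobb angles would normally come from a prior /analyze/upload or
# /recalculate response.
#
#   import httpx
#
#   resp = httpx.post(
#       "http://localhost:8000/generate-glb",
#       data={
#           "rigo_type": "A1",
#           "template_type": "regular",
#           "case_id": "case_demo01",
#           "cobb_mt": "32.5",
#           "cobb_tl": "18.0",
#       },
#       timeout=300.0,
#   )
#   resp.raise_for_status()
#   print(resp.json()["outputs"]["glb"])

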
@router.post("/generate-both-braces", summary="Generate both brace types for comparison")
|
||
async def generate_both_braces_endpoint(
|
||
req: Request,
|
||
rigo_type: str = Form(..., description="Rigo classification (A1-E2)"),
|
||
case_id: str = Form(..., description="Case identifier"),
|
||
cobb_pt: float = Form(0.0, description="Proximal Thoracic Cobb angle"),
|
||
cobb_mt: float = Form(0.0, description="Main Thoracic Cobb angle"),
|
||
cobb_tl: float = Form(0.0, description="Thoracolumbar Cobb angle"),
|
||
body_scan: Optional[UploadFile] = File(None, description="Optional 3D body scan STL"),
|
||
body_scan_path: Optional[str] = Form(None, description="Optional path to existing body scan file"),
|
||
clearance_mm: float = Form(8.0, description="Brace clearance from body in mm")
|
||
):
|
||
"""
|
||
Generate BOTH regular and vase brace types for side-by-side comparison.
|
||
|
||
This allows the user to compare the two brace shapes and choose
|
||
the preferred design.
|
||
|
||
Returns:
|
||
Both brace files with markers and pressure zones
|
||
"""
|
||
if rigo_type not in AVAILABLE_RIGO_TYPES:
|
||
raise HTTPException(
|
||
status_code=400,
|
||
detail=f"Invalid rigo_type. Must be one of: {AVAILABLE_RIGO_TYPES}"
|
||
)
|
||
|
||
import tempfile
|
||
from pathlib import Path
|
||
|
||
output_dir = Path(tempfile.gettempdir()) / "brace_generator" / case_id
|
||
output_dir.mkdir(parents=True, exist_ok=True)
|
||
|
||
final_body_scan_path = None
|
||
|
||
# Save body scan if uploaded as file
|
||
if body_scan:
|
||
body_ext = Path(body_scan.filename).suffix if body_scan.filename else ".stl"
|
||
final_body_scan_path = str(output_dir / f"body_scan{body_ext}")
|
||
with open(final_body_scan_path, "wb") as f:
|
||
content = await body_scan.read()
|
||
f.write(content)
|
||
# Or use provided path if it exists
|
||
elif body_scan_path and Path(body_scan_path).exists():
|
||
final_body_scan_path = body_scan_path
|
||
print(f"Using existing body scan at: {body_scan_path}")
|
||
|
||
cobb_angles = {
|
||
"PT": cobb_pt,
|
||
"MT": cobb_mt,
|
||
"TL": cobb_tl
|
||
}
|
||
|
||
try:
|
||
results = generate_both_brace_types(
|
||
rigo_type=rigo_type,
|
||
output_dir=output_dir,
|
||
case_id=case_id,
|
||
cobb_angles=cobb_angles,
|
||
body_scan_path=final_body_scan_path,
|
||
clearance_mm=clearance_mm
|
||
)
|
||
|
||
response = {
|
||
"success": True,
|
||
"case_id": case_id,
|
||
"rigo_type": rigo_type,
|
||
"cobb_angles": cobb_angles,
|
||
"body_scan_used": final_body_scan_path is not None,
|
||
"braces": {}
|
||
}
|
||
|
||
for brace_type, result in results.items():
|
||
if isinstance(result, dict) and "error" in result:
|
||
response["braces"][brace_type] = result
|
||
else:
|
||
response["braces"][brace_type] = {
|
||
"outputs": {
|
||
"glb": result.glb_path,
|
||
"stl": result.stl_path,
|
||
"json": result.json_path
|
||
},
|
||
"markers": result.markers,
|
||
"pressure_zones": result.pressure_zones,
|
||
"mesh_stats": result.mesh_stats
|
||
}
|
||
|
||
return response
|
||
except Exception as e:
|
||
raise HTTPException(status_code=500, detail=str(e))
|
||
|
||
|
||
@router.get("/pressure-zones/{rigo_type}", summary="Get pressure zone information")
|
||
async def get_pressure_zones(
|
||
rigo_type: str,
|
||
template_type: str = "regular",
|
||
cobb_mt: float = 25.0,
|
||
cobb_tl: float = 15.0
|
||
):
|
||
"""
|
||
Get detailed pressure zone information for a Rigo type.
|
||
|
||
This explains WHERE and HOW MUCH pressure is applied based on
|
||
the Cobb angles.
|
||
|
||
**Pressure Zone Types:**
|
||
|
||
- **PAD (Push Zone)**: Pushes INWARD on the convex side of the curve
|
||
to apply corrective force. Depth increases with Cobb angle severity.
|
||
|
||
- **BAY (Expansion Zone)**: Creates SPACE on the concave side for the
|
||
body to shift into during correction. Clearance is ~1.3x pad depth.
|
||
|
||
- **ANCHOR (Stability Zone)**: Grips the pelvis to prevent the brace
|
||
from riding up. Light inward pressure.
|
||
|
||
Returns:
|
||
Detailed pressure zone descriptions with depths in mm
|
||
"""
|
||
if rigo_type not in AVAILABLE_RIGO_TYPES:
|
||
raise HTTPException(
|
||
status_code=400,
|
||
detail=f"Invalid rigo_type. Must be one of: {AVAILABLE_RIGO_TYPES}"
|
||
)
|
||
|
||
try:
|
||
markers = load_template_markers(rigo_type, template_type)
|
||
zones = calculate_pressure_zones(
|
||
markers,
|
||
rigo_type,
|
||
{"PT": 0, "MT": cobb_mt, "TL": cobb_tl}
|
||
)
|
||
|
||
return {
|
||
"rigo_type": rigo_type,
|
||
"template_type": template_type,
|
||
"cobb_angles": {"MT": cobb_mt, "TL": cobb_tl},
|
||
"pressure_zones": [
|
||
{
|
||
"name": z.name,
|
||
"marker": z.marker_name,
|
||
"position": list(z.position),
|
||
"type": z.zone_type,
|
||
"direction": z.direction,
|
||
"function": z.function,
|
||
"depth_mm": round(z.depth_mm, 1),
|
||
"radius_mm": list(z.radius_mm)
|
||
}
|
||
for z in zones
|
||
],
|
||
"explanation": {
|
||
"pad_depth": f"Based on Cobb angle severity: {cobb_mt}° MT → {round(8 + min(max((cobb_mt - 10) / 40, 0), 1) * 14, 1)}mm thoracic pad",
|
||
"bay_clearance": "Bay clearance = 1.3 × pad depth + 4-5mm to allow body movement",
|
||
"hip_anchors": "4mm inward pressure to grip pelvis and stabilize brace"
|
||
}
|
||
}
|
||
except FileNotFoundError as e:
|
||
raise HTTPException(status_code=404, detail=str(e))
|
||
|
||
|
||
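# Worked example of the thoracic pad-depth formula used in the "explanation"
# string above: depth_mm = 8 + clamp((cobb_mt - 10) / 40, 0, 1) * 14, i.e. a
# linear ramp from 8 mm at ≤10° MT up to 22 mm at ≥50° MT.
#
#   cobb_mt = 32.0
#   severity = min(max((cobb_mt - 10) / 40, 0), 1)   # (32 - 10) / 40 = 0.55
#   pad_depth_mm = round(8 + severity * 14, 1)       # 8 + 0.55 * 14 = 15.7 mm

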
# =============================================================================
# DEV MODE: LOCAL FILE STORAGE AND SERVING
# =============================================================================

# Local storage directory for DEV mode
DEV_STORAGE_DIR = config.TEMP_DIR / "dev_storage"
DEV_STORAGE_DIR.mkdir(parents=True, exist_ok=True)


@router.post("/cases", summary="Create a new case (DEV)")
|
||
async def create_case():
|
||
"""Create a new case with a generated ID (DEV mode)."""
|
||
import uuid
|
||
from datetime import datetime
|
||
|
||
case_id = f"case-{datetime.now().strftime('%Y%m%d')}-{uuid.uuid4().hex[:8]}"
|
||
case_dir = DEV_STORAGE_DIR / case_id
|
||
(case_dir / "uploads").mkdir(parents=True, exist_ok=True)
|
||
(case_dir / "outputs").mkdir(parents=True, exist_ok=True)
|
||
|
||
# Save case metadata
|
||
metadata = {
|
||
"case_id": case_id,
|
||
"created_at": datetime.now().isoformat(),
|
||
"status": "created"
|
||
}
|
||
(case_dir / "case.json").write_text(json.dumps(metadata, indent=2))
|
||
|
||
return {"caseId": case_id, "status": "created"}
|
||
|
||
|
||
@router.get("/cases/{case_id}", summary="Get case details (DEV)")
|
||
async def get_case(case_id: str):
|
||
"""Get case details (DEV mode)."""
|
||
case_dir = DEV_STORAGE_DIR / case_id
|
||
|
||
if not case_dir.exists():
|
||
raise HTTPException(status_code=404, detail=f"Case not found: {case_id}")
|
||
|
||
metadata_file = case_dir / "case.json"
|
||
if metadata_file.exists():
|
||
metadata = json.loads(metadata_file.read_text())
|
||
else:
|
||
metadata = {"case_id": case_id, "status": "unknown"}
|
||
|
||
return metadata
|
||
|
||
|
||
@router.post("/cases/{case_id}/upload", summary="Upload X-ray for case (DEV)")
|
||
async def upload_xray(
|
||
case_id: str,
|
||
file: UploadFile = File(..., description="X-ray image file")
|
||
):
|
||
"""Upload X-ray image for a case (DEV mode - saves locally)."""
|
||
case_dir = DEV_STORAGE_DIR / case_id
|
||
uploads_dir = case_dir / "uploads"
|
||
uploads_dir.mkdir(parents=True, exist_ok=True)
|
||
|
||
# Determine extension from filename
|
||
ext = Path(file.filename).suffix.lower() if file.filename else ".jpg"
|
||
if ext not in [".jpg", ".jpeg", ".png", ".webp"]:
|
||
ext = ".jpg"
|
||
|
||
# Save as xray.{ext}
|
||
xray_path = uploads_dir / f"xray{ext}"
|
||
contents = await file.read()
|
||
xray_path.write_bytes(contents)
|
||
|
||
# Update case metadata
|
||
metadata_file = case_dir / "case.json"
|
||
if metadata_file.exists():
|
||
metadata = json.loads(metadata_file.read_text())
|
||
else:
|
||
metadata = {"case_id": case_id}
|
||
|
||
metadata["xray_uploaded"] = True
|
||
metadata["xray_filename"] = f"xray{ext}"
|
||
metadata_file.write_text(json.dumps(metadata, indent=2))
|
||
|
||
return {
|
||
"filename": f"xray{ext}",
|
||
"path": f"/files/uploads/{case_id}/xray{ext}"
|
||
}
|
||
|
||
|
||
@router.get("/cases/{case_id}/assets", summary="Get case assets (DEV)")
|
||
async def get_case_assets(case_id: str):
|
||
"""List all uploaded and output files for a case (DEV mode)."""
|
||
case_dir = DEV_STORAGE_DIR / case_id
|
||
|
||
if not case_dir.exists():
|
||
raise HTTPException(status_code=404, detail=f"Case not found: {case_id}")
|
||
|
||
uploads = []
|
||
outputs = []
|
||
|
||
# List uploads
|
||
uploads_dir = case_dir / "uploads"
|
||
if uploads_dir.exists():
|
||
for f in uploads_dir.iterdir():
|
||
if f.is_file():
|
||
uploads.append({
|
||
"filename": f.name,
|
||
"url": f"/files/uploads/{case_id}/{f.name}"
|
||
})
|
||
|
||
# List outputs
|
||
outputs_dir = case_dir / "outputs"
|
||
if outputs_dir.exists():
|
||
for f in outputs_dir.iterdir():
|
||
if f.is_file():
|
||
outputs.append({
|
||
"filename": f.name,
|
||
"url": f"/files/outputs/{case_id}/{f.name}"
|
||
})
|
||
|
||
return {
|
||
"caseId": case_id,
|
||
"assets": {
|
||
"uploads": uploads,
|
||
"outputs": outputs
|
||
}
|
||
}
|
||
|
||
|
||
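# Illustrative DEV-mode flow: create a case, upload an X-ray, then list its
# assets. Host and file name are assumptions; the URLs returned under "assets"
# point at the /files/... routes defined below.
#
#   import httpx
#
#   base = "http://localhost:8000"
#   case_id = httpx.post(f"{base}/cases").json()["caseId"]
#
#   with open("xray.jpg", "rb") as fh:
#       httpx.post(
#           f"{base}/cases/{case_id}/upload",
#           files={"file": ("xray.jpg", fh, "image/jpeg")},
#       ).raise_for_status()
#
#   assets = httpx.get(f"{base}/cases/{case_id}/assets").json()["assets"]

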
@router.get("/files/uploads/{case_id}/{filename}", summary="Serve uploaded file (DEV)")
|
||
async def serve_upload_file(case_id: str, filename: str):
|
||
"""Serve an uploaded file (DEV mode)."""
|
||
file_path = DEV_STORAGE_DIR / case_id / "uploads" / filename
|
||
|
||
if not file_path.exists():
|
||
raise HTTPException(status_code=404, detail=f"File not found: {filename}")
|
||
|
||
# Determine media type
|
||
ext = file_path.suffix.lower()
|
||
media_types = {
|
||
".jpg": "image/jpeg",
|
||
".jpeg": "image/jpeg",
|
||
".png": "image/png",
|
||
".webp": "image/webp",
|
||
".stl": "application/octet-stream",
|
||
".glb": "model/gltf-binary",
|
||
".json": "application/json",
|
||
}
|
||
media_type = media_types.get(ext, "application/octet-stream")
|
||
|
||
return FileResponse(
|
||
path=str(file_path),
|
||
filename=filename,
|
||
media_type=media_type
|
||
)
|
||
|
||
|
||
@router.get("/files/outputs/{case_id}/{filename}", summary="Serve output file (DEV)")
|
||
async def serve_output_file(case_id: str, filename: str):
|
||
"""Serve an output file (DEV mode)."""
|
||
file_path = DEV_STORAGE_DIR / case_id / "outputs" / filename
|
||
|
||
if not file_path.exists():
|
||
raise HTTPException(status_code=404, detail=f"File not found: {filename}")
|
||
|
||
# Determine media type
|
||
ext = file_path.suffix.lower()
|
||
media_types = {
|
||
".jpg": "image/jpeg",
|
||
".jpeg": "image/jpeg",
|
||
".png": "image/png",
|
||
".webp": "image/webp",
|
||
".stl": "application/octet-stream",
|
||
".ply": "application/octet-stream",
|
||
".obj": "application/octet-stream",
|
||
".glb": "model/gltf-binary",
|
||
".gltf": "model/gltf+json",
|
||
".json": "application/json",
|
||
}
|
||
media_type = media_types.get(ext, "application/octet-stream")
|
||
|
||
return FileResponse(
|
||
path=str(file_path),
|
||
filename=filename,
|
||
media_type=media_type
|
||
)
|