"""Visualization API - GeoJSON endpoints for 3D Earth display.

Unified API for all visualization data sources. Returns GeoJSON format
compatible with Three.js, CesiumJS, and Unreal Cesium.
"""
from datetime import datetime, timezone
from typing import Any, Callable, Dict, List, Optional

from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy import func, select
from sqlalchemy.ext.asyncio import AsyncSession

from app.db.session import get_db
from app.models.collected_data import CollectedData
from app.services.cable_graph import CableGraph, build_graph_from_data

router = APIRouter()


# ============== Shared Helpers ==============


def _empty_collection() -> Dict[str, Any]:
    """Return a fresh, empty GeoJSON FeatureCollection (never shared/mutated)."""
    return {"type": "FeatureCollection", "features": []}


def _with_count(geojson: Dict[str, Any]) -> Dict[str, Any]:
    """Return *geojson* augmented with a top-level feature ``count``."""
    return {**geojson, "count": len(geojson.get("features", []))}


def _parse_line(points: List[Any]) -> List[List[float]]:
    """Parse a sequence of ``[lon, lat, ...]`` points into ``[[lon, lat], ...]``.

    Points that are too short or have non-numeric coordinates are skipped.
    Returns an empty list when fewer than 2 valid points remain, because a
    line needs at least two vertices to be rendered.
    """
    coords: List[List[float]] = []
    for point in points:
        if len(point) >= 2:
            try:
                coords.append([float(point[0]), float(point[1])])
            except (ValueError, TypeError):
                continue
    return coords if len(coords) >= 2 else []


async def _fetch_by_source(db: AsyncSession, source: str) -> List[CollectedData]:
    """Load every collected record for the given ``source`` name."""
    stmt = select(CollectedData).where(CollectedData.source == source)
    result = await db.execute(stmt)
    return list(result.scalars().all())


async def _fetch_named_by_source(db: AsyncSession, source: str) -> List[CollectedData]:
    """Load records for ``source``, excluding placeholder rows named "Unknown"."""
    stmt = (
        select(CollectedData)
        .where(CollectedData.source == source)
        .where(CollectedData.name != "Unknown")
    )
    result = await db.execute(stmt)
    return list(result.scalars().all())


def _build_city_cable_map(relation_records: List[CollectedData]) -> Dict[int, List[int]]:
    """Build a ``city_id -> [cable_id, ...]`` map from cable/landing relation records.

    Records without ``extra_data`` or missing either id are ignored; duplicate
    cable ids per city are collapsed while preserving first-seen order.
    """
    city_to_cable_ids: Dict[int, List[int]] = {}
    for rel in relation_records:
        if not rel.extra_data:
            continue
        city_id = rel.extra_data.get("city_id")
        cable_id = rel.extra_data.get("cable_id")
        if city_id is None or cable_id is None:
            continue
        cable_ids = city_to_cable_ids.setdefault(city_id, [])
        if cable_id not in cable_ids:
            cable_ids.append(cable_id)
    return city_to_cable_ids


def _build_cable_name_map(cable_records: List[CollectedData]) -> Dict[int, str]:
    """Build a ``cable_id -> cable name`` map from cable records."""
    id_to_name: Dict[int, str] = {}
    for cable in cable_records:
        if cable.extra_data:
            cable_id = cable.extra_data.get("cable_id")
            if cable_id and cable.name:
                id_to_name[cable_id] = cable.name
    return id_to_name


# ============== Converter Functions ==============


def convert_cable_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
    """Convert cable records to a GeoJSON FeatureCollection of MultiLineStrings.

    Records with no parseable route are skipped entirely.
    """
    features = []
    for record in records:
        metadata = record.extra_data or {}
        route_coords = metadata.get("route_coordinates", [])
        if not route_coords:
            continue

        all_lines: List[List[List[float]]] = []
        # Handle both the old format (flat array of points) and the new
        # format (array of lines, i.e. a MultiLineString structure).
        if isinstance(route_coords[0], list):
            if route_coords[0] and isinstance(route_coords[0][0], list):
                # New format: array of lines, each a list of points.
                for line in route_coords:
                    parsed = _parse_line(line)
                    if parsed:
                        all_lines.append(parsed)
            else:
                # Old format: flat array of points - treat as a single line.
                parsed = _parse_line(route_coords)
                if parsed:
                    all_lines.append(parsed)
        if not all_lines:
            continue

        # MultiLineString preserves individual cable segments.
        features.append(
            {
                "type": "Feature",
                "geometry": {"type": "MultiLineString", "coordinates": all_lines},
                "properties": {
                    "id": record.id,
                    "cable_id": record.name,
                    "source_id": record.source_id,
                    # Deliberately duplicated keys (Name/name, owner/owners,
                    # rfs/RFS, length/length_km/SHAPE__Length) so different
                    # frontend consumers keep working without renames.
                    "Name": record.name,
                    "name": record.name,
                    "owner": metadata.get("owners"),
                    "owners": metadata.get("owners"),
                    "rfs": metadata.get("rfs"),
                    "RFS": metadata.get("rfs"),
                    "status": metadata.get("status", "active"),
                    "length": record.value,
                    "length_km": record.value,
                    "SHAPE__Length": record.value,
                    "url": metadata.get("url"),
                    "color": metadata.get("color"),
                    "year": metadata.get("year"),
                },
            }
        )
    return {"type": "FeatureCollection", "features": features}


def convert_landing_point_to_geojson(
    records: List[CollectedData],
    city_to_cable_ids_map: Optional[Dict[int, List[int]]] = None,
    cable_id_to_name_map: Optional[Dict[int, str]] = None,
) -> Dict[str, Any]:
    """Convert landing point records to a GeoJSON FeatureCollection of Points.

    Records with missing or unparseable coordinates are skipped. When both
    optional relation maps are supplied, each point gains a ``cable_names``
    property listing the cables that land at its city.
    """
    features = []
    for record in records:
        try:
            lat = float(record.latitude) if record.latitude else None
            lon = float(record.longitude) if record.longitude else None
        except (ValueError, TypeError):
            continue
        if lat is None or lon is None:
            continue

        metadata = record.extra_data or {}
        city_id = metadata.get("city_id")
        props: Dict[str, Any] = {
            "id": record.id,
            "source_id": record.source_id,
            "name": record.name,
            "country": record.country,
            "city": record.city,
            "is_tbd": metadata.get("is_tbd", False),
        }

        cable_names = []
        if city_to_cable_ids_map and city_id in city_to_cable_ids_map:
            for cable_id in city_to_cable_ids_map[city_id]:
                if cable_id_to_name_map and cable_id in cable_id_to_name_map:
                    cable_names.append(cable_id_to_name_map[cable_id])
        if cable_names:
            props["cable_names"] = cable_names

        features.append(
            {
                "type": "Feature",
                "geometry": {"type": "Point", "coordinates": [lon, lat]},
                "properties": props,
            }
        )
    return {"type": "FeatureCollection", "features": features}


def convert_satellite_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
    """Convert satellite TLE records to GeoJSON.

    Geometry coordinates are placeholders ([0, 0, 0]); presumably the client
    computes real positions from the orbital elements in ``properties`` —
    confirm against the frontend. Records without a NORAD id are skipped.
    """
    features = []
    for record in records:
        metadata = record.extra_data or {}
        norad_id = metadata.get("norad_cat_id")
        if not norad_id:
            continue  # cannot identify the satellite without a NORAD id
        features.append(
            {
                "type": "Feature",
                "id": norad_id,
                "geometry": {"type": "Point", "coordinates": [0, 0, 0]},
                "properties": {
                    "id": record.id,
                    "norad_cat_id": norad_id,
                    "name": record.name,
                    "international_designator": metadata.get("international_designator"),
                    "epoch": metadata.get("epoch"),
                    "inclination": metadata.get("inclination"),
                    "raan": metadata.get("raan"),
                    "eccentricity": metadata.get("eccentricity"),
                    "arg_of_perigee": metadata.get("arg_of_perigee"),
                    "mean_anomaly": metadata.get("mean_anomaly"),
                    "mean_motion": metadata.get("mean_motion"),
                    "bstar": metadata.get("bstar"),
                    "classification_type": metadata.get("classification_type"),
                    "data_type": "satellite_tle",
                },
            }
        )
    return {"type": "FeatureCollection", "features": features}


def convert_supercomputer_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
    """Convert TOP500 supercomputer records to GeoJSON Points.

    Records with unusable coordinates are kept but placed at [0, 0].
    """
    features = []
    for record in records:
        try:
            # NOTE(review): latitude/longitude appear to be stored as strings,
            # with "0.0" meaning "no coordinate" - confirm against the collector.
            lat = float(record.latitude) if record.latitude and record.latitude != "0.0" else None
            lon = (
                float(record.longitude) if record.longitude and record.longitude != "0.0" else None
            )
        except (ValueError, TypeError):
            lat, lon = None, None
        metadata = record.extra_data or {}
        features.append(
            {
                "type": "Feature",
                "id": record.id,
                "geometry": {"type": "Point", "coordinates": [lon or 0, lat or 0]},
                "properties": {
                    "id": record.id,
                    "name": record.name,
                    "rank": metadata.get("rank"),
                    "r_max": record.value,
                    "r_peak": metadata.get("r_peak"),
                    "cores": metadata.get("cores"),
                    "power": metadata.get("power"),
                    "country": record.country,
                    "city": record.city,
                    "data_type": "supercomputer",
                },
            }
        )
    return {"type": "FeatureCollection", "features": features}


def convert_gpu_cluster_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
    """Convert GPU cluster records to GeoJSON Points (missing coords fall back to [0, 0])."""
    features = []
    for record in records:
        try:
            lat = float(record.latitude) if record.latitude else None
            lon = float(record.longitude) if record.longitude else None
        except (ValueError, TypeError):
            lat, lon = None, None
        metadata = record.extra_data or {}
        features.append(
            {
                "type": "Feature",
                "id": record.id,
                "geometry": {"type": "Point", "coordinates": [lon or 0, lat or 0]},
                "properties": {
                    "id": record.id,
                    "name": record.name,
                    "country": record.country,
                    "city": record.city,
                    "metadata": metadata,
                    "data_type": "gpu_cluster",
                },
            }
        )
    return {"type": "FeatureCollection", "features": features}


async def _limited_feature_response(
    db: AsyncSession,
    source: str,
    limit: int,
    converter: Callable[[List[CollectedData]], Dict[str, Any]],
    newest_first: bool = False,
) -> Dict[str, Any]:
    """Shared scaffold for the limited GeoJSON endpoints.

    Queries up to ``limit`` non-"Unknown" records for ``source``, runs
    ``converter``, and returns the FeatureCollection with a ``count`` field.
    Returns an empty collection (not a 404) when nothing is found.
    """
    stmt = (
        select(CollectedData)
        .where(CollectedData.source == source)
        .where(CollectedData.name != "Unknown")
    )
    if newest_first:
        stmt = stmt.order_by(CollectedData.id.desc())
    stmt = stmt.limit(limit)
    result = await db.execute(stmt)
    records = list(result.scalars().all())
    if not records:
        return {**_empty_collection(), "count": 0}
    return _with_count(converter(records))


# ============== API Endpoints ==============


@router.get("/geo/cables")
async def get_cables_geojson(db: AsyncSession = Depends(get_db)):
    """Get submarine cable GeoJSON data (MultiLineString features).

    Raises 404 when no cable data has been collected yet, 500 on any
    unexpected error.
    """
    try:
        records = await _fetch_by_source(db, "arcgis_cables")
        if not records:
            raise HTTPException(
                status_code=404,
                detail="No cable data found. Please run the arcgis_cables collector first.",
            )
        return convert_cable_to_geojson(records)
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Internal error: {str(e)}")


@router.get("/geo/landing-points")
async def get_landing_points_geojson(db: AsyncSession = Depends(get_db)):
    """Get landing point GeoJSON data (Point features), enriched with cable names.

    Raises 404 when no landing point data has been collected yet, 500 on any
    unexpected error.
    """
    try:
        records = await _fetch_by_source(db, "arcgis_landing_points")
        relation_records = await _fetch_by_source(db, "arcgis_cable_landing_relation")
        cable_records = await _fetch_by_source(db, "arcgis_cables")

        city_to_cable_ids_map = _build_city_cable_map(relation_records)
        cable_id_to_name_map = _build_cable_name_map(cable_records)

        if not records:
            raise HTTPException(
                status_code=404,
                detail="No landing point data found. Please run the arcgis_landing_points collector first.",
            )
        return convert_landing_point_to_geojson(records, city_to_cable_ids_map, cable_id_to_name_map)
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Internal error: {str(e)}")


@router.get("/geo/all")
async def get_all_geojson(db: AsyncSession = Depends(get_db)):
    """Get cables and landing points together, plus basic feature counts.

    Unlike the individual endpoints, missing data yields empty collections
    rather than a 404.
    """
    cables_records = await _fetch_by_source(db, "arcgis_cables")
    points_records = await _fetch_by_source(db, "arcgis_landing_points")
    relation_records = await _fetch_by_source(db, "arcgis_cable_landing_relation")

    city_to_cable_ids_map = _build_city_cable_map(relation_records)
    cable_id_to_name_map = _build_cable_name_map(cables_records)

    cables = convert_cable_to_geojson(cables_records) if cables_records else _empty_collection()
    points = (
        convert_landing_point_to_geojson(points_records, city_to_cable_ids_map, cable_id_to_name_map)
        if points_records
        else _empty_collection()
    )
    return {
        "cables": cables,
        "landing_points": points,
        "stats": {
            "cable_count": len(cables.get("features", [])) if cables else 0,
            "landing_point_count": len(points.get("features", [])) if points else 0,
        },
    }


@router.get("/geo/satellites")
async def get_satellites_geojson(
    limit: int = 10000,
    db: AsyncSession = Depends(get_db),
):
    """Get satellite TLE GeoJSON data (newest records first, up to ``limit``)."""
    return await _limited_feature_response(
        db, "celestrak_tle", limit, convert_satellite_to_geojson, newest_first=True
    )


@router.get("/geo/supercomputers")
async def get_supercomputers_geojson(
    limit: int = 500,
    db: AsyncSession = Depends(get_db),
):
    """Get TOP500 supercomputer GeoJSON data (up to ``limit`` records)."""
    return await _limited_feature_response(
        db, "top500", limit, convert_supercomputer_to_geojson
    )


@router.get("/geo/gpu-clusters")
async def get_gpu_clusters_geojson(
    limit: int = 100,
    db: AsyncSession = Depends(get_db),
):
    """Get GPU cluster GeoJSON data (up to ``limit`` records)."""
    return await _limited_feature_response(
        db, "epoch_ai_gpu", limit, convert_gpu_cluster_to_geojson
    )


@router.get("/all")
async def get_all_visualization_data(db: AsyncSession = Depends(get_db)):
    """Unified endpoint returning GeoJSON FeatureCollections for all data types.

    Returns:
    - satellites: satellite TLE data
    - cables: submarine cables
    - landing_points: landing points
    - supercomputers: TOP500 supercomputers
    - gpu_clusters: GPU clusters

    Missing sources yield empty collections; no 404 is raised here.
    """
    cables_records = await _fetch_by_source(db, "arcgis_cables")
    points_records = await _fetch_by_source(db, "arcgis_landing_points")
    satellites_records = await _fetch_named_by_source(db, "celestrak_tle")
    supercomputers_records = await _fetch_named_by_source(db, "top500")
    gpu_records = await _fetch_named_by_source(db, "epoch_ai_gpu")

    cables = convert_cable_to_geojson(cables_records) if cables_records else _empty_collection()
    landing_points = (
        convert_landing_point_to_geojson(points_records) if points_records else _empty_collection()
    )
    satellites = (
        convert_satellite_to_geojson(satellites_records)
        if satellites_records
        else _empty_collection()
    )
    supercomputers = (
        convert_supercomputer_to_geojson(supercomputers_records)
        if supercomputers_records
        else _empty_collection()
    )
    gpu_clusters = (
        convert_gpu_cluster_to_geojson(gpu_records) if gpu_records else _empty_collection()
    )

    counts = {
        "satellites": len(satellites.get("features", [])),
        "cables": len(cables.get("features", [])),
        "landing_points": len(landing_points.get("features", [])),
        "supercomputers": len(supercomputers.get("features", [])),
        "gpu_clusters": len(gpu_clusters.get("features", [])),
    }
    return {
        # Timezone-aware replacement for the deprecated datetime.utcnow();
        # the replace() keeps the historical "...Z" suffix format.
        "generated_at": datetime.now(timezone.utc).isoformat().replace("+00:00", "Z"),
        "version": "1.0",
        "data": {
            "satellites": satellites,
            "cables": cables,
            "landing_points": landing_points,
            "supercomputers": supercomputers,
            "gpu_clusters": gpu_clusters,
        },
        "stats": {"total_features": sum(counts.values()), **counts},
    }


# Module-level cache for the cable graph.
# NOTE(review): the cache is never invalidated - newly collected cable data is
# only picked up after a process restart. Confirm this is acceptable.
_cable_graph: Optional[CableGraph] = None


async def get_cable_graph(db: AsyncSession) -> CableGraph:
    """Get the cable graph, building and caching it on first use."""
    global _cable_graph
    if _cable_graph is None:
        cables_records = await _fetch_by_source(db, "arcgis_cables")
        points_records = await _fetch_by_source(db, "arcgis_landing_points")
        cables_data = convert_cable_to_geojson(cables_records)
        points_data = convert_landing_point_to_geojson(points_records)
        _cable_graph = build_graph_from_data(cables_data, points_data)
    return _cable_graph


@router.post("/geo/path")
async def find_path(
    start: List[float],
    end: List[float],
    db: AsyncSession = Depends(get_db),
):
    """Find the shortest path between two coordinates via the cable network.

    ``start`` and ``end`` are ``[lon, lat]`` pairs. Raises 400 on malformed
    input and 404 when no path can be found.
    """
    if not start or len(start) != 2:
        raise HTTPException(status_code=400, detail="Start must be [lon, lat]")
    if not end or len(end) != 2:
        raise HTTPException(status_code=400, detail="End must be [lon, lat]")
    graph = await get_cable_graph(db)
    result = graph.find_shortest_path(start, end)
    if not result:
        raise HTTPException(
            status_code=404,
            detail="No path found between these points. They may be too far from any landing point.",
        )
    return result