Files
planet/backend/app/api/v1/visualization.py
rayd1o c82e1d5a04 fix: 修复3D地球坐标映射多个严重bug
## Bug修复详情

### 1. 致命错误:球面距离计算 (calculateDistance)
- 问题:使用勾股定理计算经纬度距离,在球体表面完全错误
- 修复:改用Haversine公式计算球面大圆距离
- 影响:赤道1度=111km,极地1度=19km,原计算误差巨大

### 2. 经度范围规范化 (vector3ToLatLon)
- 问题:Math.atan2返回[-180°,180°],转换后可能超出标准范围
- 修复:添加while循环规范化到[-180, 180]区间
- 影响:避免本初子午线附近返回360°的异常值

### 3. 屏幕坐标转换支持非全屏 (screenToEarthCoords)
- 问题:假设Canvas永远全屏,非全屏时点击偏移严重
- 修复:新增domElement参数,使用getBoundingClientRect()计算相对坐标
- 影响:嵌入式3D地球组件也能精准拾取

### 4. 地球旋转时经纬度映射错误
- 问题:Raycaster返回世界坐标,未考虑地球自转
- 修复:使用earth.worldToLocal()转换到本地坐标空间
- 影响:地球旋转时经纬度显示正确跟随

## 新增功能

- CelesTrak卫星数据采集器
- Space-Track卫星数据采集器
- 卫星可视化模块(500颗,实时SGP4轨道计算)
- 海底光缆悬停显示info-card
- 统一info-card组件
- 工具栏按钮(Stellarium风格)
- 缩放控制(百分比显示)
- Docker volume映射(代码热更新)

## 文件变更

- utils.js: 坐标转换核心逻辑修复
- satellites.js: 新增卫星可视化
- cables.js: 悬停交互支持
- main.js: 悬停/锁定逻辑
- controls.js: 工具栏UI
- info-card.js: 统一卡片组件
- docker-compose.yml: volume映射
- restart.sh: 简化重启脚本
2026-03-17 04:10:24 +08:00

537 lines
18 KiB
Python

"""Visualization API - GeoJSON endpoints for 3D Earth display
Unified API for all visualization data sources.
Returns GeoJSON format compatible with Three.js, CesiumJS, and Unreal Cesium.
"""
from datetime import datetime
from fastapi import APIRouter, HTTPException, Depends
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, func
from typing import List, Dict, Any, Optional
from app.db.session import get_db
from app.models.collected_data import CollectedData
from app.services.cable_graph import build_graph_from_data, CableGraph
# Shared router for all visualization endpoints; mounted by the v1 API package.
router = APIRouter()
# ============== Converter Functions ==============
def convert_cable_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
    """Convert cable records to a GeoJSON FeatureCollection.

    Each record's ``extra_data["route_coordinates"]`` may be either:
      * new format: a list of lines, each a list of [lon, lat] points, or
      * old format: a flat list of [lon, lat] points (a single line).

    Records with no usable route (fewer than 2 valid points on every line)
    are skipped. Geometry is emitted as MultiLineString so that individual
    cable segments are preserved.
    """

    def _parse_line(points: List[Any]) -> List[List[float]]:
        """Extract valid [lon, lat] float pairs; malformed points are dropped."""
        coords: List[List[float]] = []
        for point in points:
            # Guard against non-sequence entries: len()/indexing on e.g. an
            # int would raise an uncaught TypeError in the old code.
            if not isinstance(point, (list, tuple)) or len(point) < 2:
                continue
            try:
                coords.append([float(point[0]), float(point[1])])
            except (ValueError, TypeError):
                continue
        return coords

    features = []
    for record in records:
        metadata = record.extra_data or {}
        route_coords = metadata.get("route_coordinates", [])
        if not route_coords:
            continue
        all_lines = []
        first = route_coords[0]
        if isinstance(first, list):
            # Distinguish new vs old format by the first element's shape.
            # The emptiness check fixes an IndexError the old code hit on
            # a leading empty line ([][0]).
            if first and isinstance(first[0], list):
                # New format: list of lines (MultiLineString structure).
                for line in route_coords:
                    line_coords = _parse_line(line)
                    if len(line_coords) >= 2:
                        all_lines.append(line_coords)
            else:
                # Old format: flat list of points - treat as a single line.
                line_coords = _parse_line(route_coords)
                if len(line_coords) >= 2:
                    all_lines.append(line_coords)
        if not all_lines:
            continue
        # Use MultiLineString format to preserve cable segments.
        features.append(
            {
                "type": "Feature",
                "geometry": {"type": "MultiLineString", "coordinates": all_lines},
                "properties": {
                    # Duplicate keys (Name/name, rfs/RFS, ...) are kept for
                    # compatibility with multiple front-end consumers.
                    "id": record.id,
                    "cable_id": record.name,
                    "source_id": record.source_id,
                    "Name": record.name,
                    "name": record.name,
                    "owner": metadata.get("owners"),
                    "owners": metadata.get("owners"),
                    "rfs": metadata.get("rfs"),
                    "RFS": metadata.get("rfs"),
                    "status": metadata.get("status", "active"),
                    "length": record.value,
                    "length_km": record.value,
                    "SHAPE__Length": record.value,
                    "url": metadata.get("url"),
                    "color": metadata.get("color"),
                    "year": metadata.get("year"),
                },
            }
        )
    return {"type": "FeatureCollection", "features": features}
def convert_landing_point_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
    """Convert landing point records to a GeoJSON FeatureCollection.

    Records whose latitude or longitude is missing or unparseable are
    skipped entirely (a landing point must have a real position).
    """
    features = []
    for record in records:
        # Bug fix: compare against None instead of relying on truthiness.
        # A latitude/longitude of 0 (equator / prime meridian) is falsy but
        # perfectly valid and used to be silently dropped.
        try:
            lat = float(record.latitude) if record.latitude is not None else None
            lon = float(record.longitude) if record.longitude is not None else None
        except (ValueError, TypeError):
            continue
        if lat is None or lon is None:
            continue
        metadata = record.extra_data or {}
        features.append(
            {
                "type": "Feature",
                "geometry": {"type": "Point", "coordinates": [lon, lat]},
                "properties": {
                    "id": record.id,
                    "source_id": record.source_id,
                    "name": record.name,
                    "country": record.country,
                    "city": record.city,
                    "is_tbd": metadata.get("is_tbd", False),
                },
            }
        )
    return {"type": "FeatureCollection", "features": features}
def convert_satellite_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
    """Build a GeoJSON FeatureCollection from satellite TLE records.

    Records without a NORAD catalogue id are ignored. Coordinates are a
    [0, 0, 0] placeholder - actual positions are propagated client-side
    from the orbital elements carried in the properties.
    """
    orbital_keys = (
        "international_designator",
        "epoch",
        "inclination",
        "raan",
        "eccentricity",
        "arg_of_perigee",
        "mean_anomaly",
        "mean_motion",
        "bstar",
        "classification_type",
    )
    features = []
    for record in records:
        meta = record.extra_data or {}
        norad_id = meta.get("norad_cat_id")
        if not norad_id:
            continue
        props = {"id": record.id, "norad_cat_id": norad_id, "name": record.name}
        props.update({key: meta.get(key) for key in orbital_keys})
        props["data_type"] = "satellite_tle"
        features.append(
            {
                "type": "Feature",
                "id": norad_id,
                "geometry": {"type": "Point", "coordinates": [0, 0, 0]},
                "properties": props,
            }
        )
    return {"type": "FeatureCollection", "features": features}
def convert_supercomputer_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
    """Build a GeoJSON FeatureCollection from TOP500 supercomputer records.

    Falsy coordinates and the literal "0.0" placeholder are treated as
    missing; features without a usable position are still emitted,
    anchored at [0, 0].
    """
    features = []
    for rec in records:
        try:
            lat = float(rec.latitude) if rec.latitude and rec.latitude != "0.0" else None
            lon = float(rec.longitude) if rec.longitude and rec.longitude != "0.0" else None
        except (ValueError, TypeError):
            # One unparseable coordinate invalidates the whole pair.
            lat, lon = None, None
        meta = rec.extra_data or {}
        props = {
            "id": rec.id,
            "name": rec.name,
            "rank": meta.get("rank"),
            "r_max": rec.value,
            "r_peak": meta.get("r_peak"),
            "cores": meta.get("cores"),
            "power": meta.get("power"),
            "country": rec.country,
            "city": rec.city,
            "data_type": "supercomputer",
        }
        features.append(
            {
                "type": "Feature",
                "id": rec.id,
                "geometry": {"type": "Point", "coordinates": [lon or 0, lat or 0]},
                "properties": props,
            }
        )
    return {"type": "FeatureCollection", "features": features}
def convert_gpu_cluster_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
    """Build a GeoJSON FeatureCollection from GPU cluster records.

    The record's full extra_data payload is passed through under the
    "metadata" property; missing coordinates default to [0, 0].
    """
    features = []
    for rec in records:
        try:
            lat = float(rec.latitude) if rec.latitude else None
            lon = float(rec.longitude) if rec.longitude else None
        except (ValueError, TypeError):
            # Unparseable pair - fall back to the [0, 0] anchor below.
            lat, lon = None, None
        features.append(
            {
                "type": "Feature",
                "id": rec.id,
                "geometry": {"type": "Point", "coordinates": [lon or 0, lat or 0]},
                "properties": {
                    "id": rec.id,
                    "name": rec.name,
                    "country": rec.country,
                    "city": rec.city,
                    "metadata": rec.extra_data or {},
                    "data_type": "gpu_cluster",
                },
            }
        )
    return {"type": "FeatureCollection", "features": features}
# ============== API Endpoints ==============
@router.get("/geo/cables")
async def get_cables_geojson(db: AsyncSession = Depends(get_db)):
    """Return submarine cable routes as a GeoJSON FeatureCollection.

    Responds 404 when the arcgis_cables collector has produced no records;
    any other failure is surfaced as HTTP 500.
    """
    try:
        query = select(CollectedData).where(CollectedData.source == "arcgis_cables")
        rows = (await db.execute(query)).scalars().all()
        if not rows:
            raise HTTPException(
                status_code=404,
                detail="No cable data found. Please run the arcgis_cables collector first.",
            )
        return convert_cable_to_geojson(rows)
    except HTTPException:
        # Re-raise our own 404 untouched.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Internal error: {str(e)}")
@router.get("/geo/landing-points")
async def get_landing_points_geojson(db: AsyncSession = Depends(get_db)):
    """Return cable landing points as a GeoJSON FeatureCollection.

    Responds 404 when the arcgis_landing_points collector has no records;
    any other failure is surfaced as HTTP 500.
    """
    try:
        query = select(CollectedData).where(
            CollectedData.source == "arcgis_landing_points"
        )
        rows = (await db.execute(query)).scalars().all()
        if not rows:
            raise HTTPException(
                status_code=404,
                detail="No landing point data found. Please run the arcgis_landing_points collector first.",
            )
        return convert_landing_point_to_geojson(rows)
    except HTTPException:
        # Re-raise our own 404 untouched.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Internal error: {str(e)}")
@router.get("/geo/all")
async def get_all_geojson(db: AsyncSession = Depends(get_db)):
    """Return cables and landing points in one payload, plus feature counts."""

    async def _rows(source_name):
        # Fetch every record produced by the given collector source.
        query = select(CollectedData).where(CollectedData.source == source_name)
        return (await db.execute(query)).scalars().all()

    cable_rows = await _rows("arcgis_cables")
    point_rows = await _rows("arcgis_landing_points")

    cables = (
        convert_cable_to_geojson(cable_rows)
        if cable_rows
        else {"type": "FeatureCollection", "features": []}
    )
    points = (
        convert_landing_point_to_geojson(point_rows)
        if point_rows
        else {"type": "FeatureCollection", "features": []}
    )
    return {
        "cables": cables,
        "landing_points": points,
        "stats": {
            "cable_count": len(cables.get("features", [])),
            "landing_point_count": len(points.get("features", [])),
        },
    }
@router.get("/geo/satellites")
async def get_satellites_geojson(
    limit: int = 10000,
    db: AsyncSession = Depends(get_db),
):
    """Return satellite TLE records as GeoJSON, newest first, capped at `limit`.

    Rows named "Unknown" are excluded. An empty result yields an empty
    FeatureCollection rather than an error.
    """
    query = (
        select(CollectedData)
        .where(CollectedData.source == "celestrak_tle")
        .where(CollectedData.name != "Unknown")
        .order_by(CollectedData.id.desc())
        .limit(limit)
    )
    rows = (await db.execute(query)).scalars().all()
    if not rows:
        return {"type": "FeatureCollection", "features": [], "count": 0}
    collection = convert_satellite_to_geojson(list(rows))
    return {**collection, "count": len(collection.get("features", []))}
@router.get("/geo/supercomputers")
async def get_supercomputers_geojson(
    limit: int = 500,
    db: AsyncSession = Depends(get_db),
):
    """Return TOP500 supercomputer sites as GeoJSON, capped at `limit`.

    Rows named "Unknown" are excluded; an empty result yields an empty
    FeatureCollection rather than an error.
    """
    query = (
        select(CollectedData)
        .where(CollectedData.source == "top500")
        .where(CollectedData.name != "Unknown")
        .limit(limit)
    )
    rows = (await db.execute(query)).scalars().all()
    if not rows:
        return {"type": "FeatureCollection", "features": [], "count": 0}
    collection = convert_supercomputer_to_geojson(list(rows))
    return {**collection, "count": len(collection.get("features", []))}
@router.get("/geo/gpu-clusters")
async def get_gpu_clusters_geojson(
    limit: int = 100,
    db: AsyncSession = Depends(get_db),
):
    """Return GPU cluster sites as GeoJSON, capped at `limit`.

    Rows named "Unknown" are excluded; an empty result yields an empty
    FeatureCollection rather than an error.
    """
    query = (
        select(CollectedData)
        .where(CollectedData.source == "epoch_ai_gpu")
        .where(CollectedData.name != "Unknown")
        .limit(limit)
    )
    rows = (await db.execute(query)).scalars().all()
    if not rows:
        return {"type": "FeatureCollection", "features": [], "count": 0}
    collection = convert_gpu_cluster_to_geojson(list(rows))
    return {**collection, "count": len(collection.get("features", []))}
@router.get("/all")
async def get_all_visualization_data(db: AsyncSession = Depends(get_db)):
    """Unified endpoint returning every visualization dataset.

    Returns GeoJSON FeatureCollections for all data types:
    - satellites: satellite TLE data
    - cables: submarine cables
    - landing_points: cable landing points
    - supercomputers: TOP500 systems
    - gpu_clusters: GPU clusters

    Also includes a generation timestamp and per-dataset feature counts.
    """

    async def _fetch(source: str, skip_unknown: bool = True) -> List[CollectedData]:
        """Load all rows for one collector source, optionally dropping 'Unknown' names."""
        stmt = select(CollectedData).where(CollectedData.source == source)
        if skip_unknown:
            stmt = stmt.where(CollectedData.name != "Unknown")
        result = await db.execute(stmt)
        return list(result.scalars().all())

    # Cable/landing-point sources keep their "Unknown" rows, matching /geo/all.
    cables_records = await _fetch("arcgis_cables", skip_unknown=False)
    points_records = await _fetch("arcgis_landing_points", skip_unknown=False)
    satellites_records = await _fetch("celestrak_tle")
    supercomputers_records = await _fetch("top500")
    gpu_records = await _fetch("epoch_ai_gpu")

    def _to_geojson(converter, records) -> Dict[str, Any]:
        """Convert records, falling back to an empty FeatureCollection."""
        if not records:
            return {"type": "FeatureCollection", "features": []}
        return converter(records)

    data = {
        "satellites": _to_geojson(convert_satellite_to_geojson, satellites_records),
        "cables": _to_geojson(convert_cable_to_geojson, cables_records),
        "landing_points": _to_geojson(convert_landing_point_to_geojson, points_records),
        "supercomputers": _to_geojson(convert_supercomputer_to_geojson, supercomputers_records),
        "gpu_clusters": _to_geojson(convert_gpu_cluster_to_geojson, gpu_records),
    }
    counts = {name: len(fc.get("features", [])) for name, fc in data.items()}
    # datetime.utcnow() is deprecated since Python 3.12; use an aware UTC
    # clock and strip tzinfo so the rendered timestamp keeps the exact
    # original "...Z" shape (no "+00:00" suffix).
    generated_at = datetime.now(timezone.utc).replace(tzinfo=None).isoformat() + "Z"
    return {
        "generated_at": generated_at,
        "version": "1.0",
        "data": data,
        "stats": {"total_features": sum(counts.values()), **counts},
    }
# Cache for cable graph
# Process-lifetime, lazily built singleton. NOTE(review): it is never
# invalidated - newly collected cable/landing-point rows are not picked up
# until the process restarts; confirm that is acceptable.
_cable_graph: Optional[CableGraph] = None
async def get_cable_graph(db: AsyncSession) -> CableGraph:
    """Get or build cable graph (cached)

    On first call, loads every cable and landing-point record, converts both
    to GeoJSON via this module's converter functions, and builds the routing
    graph. Subsequent calls return the cached instance without touching the
    database. NOTE(review): no lock guards the build, so concurrent first
    requests may each build a graph and the last writer wins - presumably
    harmless since the results are identical, but confirm under load.
    """
    global _cable_graph
    if _cable_graph is None:
        cables_stmt = select(CollectedData).where(CollectedData.source == "arcgis_cables")
        cables_result = await db.execute(cables_stmt)
        cables_records = list(cables_result.scalars().all())
        points_stmt = select(CollectedData).where(CollectedData.source == "arcgis_landing_points")
        points_result = await db.execute(points_stmt)
        points_records = list(points_result.scalars().all())
        # Reuse the GeoJSON converters so the graph sees the exact same
        # geometry the /geo endpoints serve.
        cables_data = convert_cable_to_geojson(cables_records)
        points_data = convert_landing_point_to_geojson(points_records)
        _cable_graph = build_graph_from_data(cables_data, points_data)
    return _cable_graph
@router.post("/geo/path")
async def find_path(
    start: List[float],
    end: List[float],
    db: AsyncSession = Depends(get_db),
):
    """Find the shortest route between two [lon, lat] points over the cable network.

    Responds 400 for malformed coordinates and 404 when no route exists.
    """
    # Validate both endpoints in order: each must be a two-element [lon, lat].
    for label, coord in (("Start", start), ("End", end)):
        if not coord or len(coord) != 2:
            raise HTTPException(status_code=400, detail=f"{label} must be [lon, lat]")
    graph = await get_cable_graph(db)
    path = graph.find_shortest_path(start, end)
    if path:
        return path
    raise HTTPException(
        status_code=404,
        detail="No path found between these points. They may be too far from any landing point.",
    )