fix: 修复3D地球坐标映射多个严重bug
## Bug修复详情

### 1. 致命错误:球面距离计算 (calculateDistance)
- 问题:使用勾股定理计算经纬度距离,在球体表面完全错误
- 修复:改用Haversine公式计算球面大圆距离
- 影响:赤道1度=111km,极地1度=19km,原计算误差巨大

### 2. 经度范围规范化 (vector3ToLatLon)
- 问题:Math.atan2返回[-180°,180°],转换后可能超出标准范围
- 修复:添加while循环规范化到[-180, 180]区间
- 影响:避免本初子午线附近返回360°的异常值

### 3. 屏幕坐标转换支持非全屏 (screenToEarthCoords)
- 问题:假设Canvas永远全屏,非全屏时点击偏移严重
- 修复:新增domElement参数,使用getBoundingClientRect()计算相对坐标
- 影响:嵌入式3D地球组件也能精准拾取

### 4. 地球旋转时经纬度映射错误
- 问题:Raycaster返回世界坐标,未考虑地球自转
- 修复:使用earth.worldToLocal()转换到本地坐标空间
- 影响:地球旋转时经纬度显示正确跟随

## 新增功能
- CelesTrak卫星数据采集器
- Space-Track卫星数据采集器
- 卫星可视化模块(500颗,实时SGP4轨道计算)
- 海底光缆悬停显示info-card
- 统一info-card组件
- 工具栏按钮(Stellarium风格)
- 缩放控制(百分比显示)
- Docker volume映射(代码热更新)

## 文件变更
- utils.js: 坐标转换核心逻辑修复
- satellites.js: 新增卫星可视化
- cables.js: 悬停交互支持
- main.js: 悬停/锁定逻辑
- controls.js: 工具栏UI
- info-card.js: 统一卡片组件
- docker-compose.yml: volume映射
- restart.sh: 简化重启脚本
This commit is contained in:
@@ -1,8 +1,13 @@
|
||||
"""Visualization API - GeoJSON endpoints for 3D Earth display"""
|
||||
"""Visualization API - GeoJSON endpoints for 3D Earth display
|
||||
|
||||
Unified API for all visualization data sources.
|
||||
Returns GeoJSON format compatible with Three.js, CesiumJS, and Unreal Cesium.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from fastapi import APIRouter, HTTPException, Depends
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy import select, func
|
||||
from typing import List, Dict, Any, Optional
|
||||
|
||||
from app.db.session import get_db
|
||||
@@ -12,6 +17,9 @@ from app.services.cable_graph import build_graph_from_data, CableGraph
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
# ============== Converter Functions ==============
|
||||
|
||||
|
||||
def convert_cable_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
|
||||
"""Convert cable records to GeoJSON FeatureCollection"""
|
||||
features = []
|
||||
@@ -122,6 +130,117 @@ def convert_landing_point_to_geojson(records: List[CollectedData]) -> Dict[str,
|
||||
return {"type": "FeatureCollection", "features": features}
|
||||
|
||||
|
||||
def convert_satellite_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
    """Convert satellite TLE records into a GeoJSON FeatureCollection.

    The point coordinates are placeholders ([0, 0, 0]); actual positions are
    expected to be derived client-side from the TLE orbital elements carried
    in the properties. Records without a NORAD catalog id are skipped.
    """
    # Orbital-element keys copied verbatim from each record's extra_data.
    orbital_keys = (
        "international_designator",
        "epoch",
        "inclination",
        "raan",
        "eccentricity",
        "arg_of_perigee",
        "mean_anomaly",
        "mean_motion",
        "bstar",
        "classification_type",
    )

    features = []
    for rec in records:
        meta = rec.extra_data or {}
        catalog_id = meta.get("norad_cat_id")
        if not catalog_id:
            # Without a NORAD id the satellite cannot be identified/propagated.
            continue

        props: Dict[str, Any] = {
            "id": rec.id,
            "norad_cat_id": catalog_id,
            "name": rec.name,
        }
        props.update({key: meta.get(key) for key in orbital_keys})
        props["data_type"] = "satellite_tle"

        features.append(
            {
                "type": "Feature",
                "id": catalog_id,
                "geometry": {"type": "Point", "coordinates": [0, 0, 0]},
                "properties": props,
            }
        )

    return {"type": "FeatureCollection", "features": features}
|
||||
|
||||
|
||||
def convert_supercomputer_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
    """Convert TOP500 supercomputer records into a GeoJSON FeatureCollection.

    Coordinates are parsed from the record's string fields; missing or
    unparseable values (and the literal "0.0" placeholder the collector
    writes for unknown locations) fall back to (0, 0).
    """

    def parse_coord(raw):
        # "0.0" is the collector's "no location" placeholder, not a real fix.
        if not raw or raw == "0.0":
            return None
        return float(raw)

    features = []
    for rec in records:
        try:
            lat = parse_coord(rec.latitude)
            lon = parse_coord(rec.longitude)
        except (ValueError, TypeError):
            # If either coordinate fails to parse, drop both.
            lat = lon = None

        info = rec.extra_data or {}
        features.append(
            {
                "type": "Feature",
                "id": rec.id,
                "geometry": {"type": "Point", "coordinates": [lon or 0, lat or 0]},
                "properties": {
                    "id": rec.id,
                    "name": rec.name,
                    "rank": info.get("rank"),
                    "r_max": rec.value,
                    "r_peak": info.get("r_peak"),
                    "cores": info.get("cores"),
                    "power": info.get("power"),
                    "country": rec.country,
                    "city": rec.city,
                    "data_type": "supercomputer",
                },
            }
        )

    return {"type": "FeatureCollection", "features": features}
|
||||
|
||||
|
||||
def convert_gpu_cluster_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
    """Convert GPU cluster records into a GeoJSON FeatureCollection.

    Missing or unparseable coordinates fall back to (0, 0). The record's raw
    extra_data payload is passed through under the "metadata" property.
    """
    features = []
    for rec in records:
        lat = lon = None
        try:
            if rec.latitude:
                lat = float(rec.latitude)
            if rec.longitude:
                lon = float(rec.longitude)
        except (ValueError, TypeError):
            # A failure on either coordinate invalidates both.
            lat = lon = None

        feature = {
            "type": "Feature",
            "id": rec.id,
            "geometry": {"type": "Point", "coordinates": [lon or 0, lat or 0]},
            "properties": {
                "id": rec.id,
                "name": rec.name,
                "country": rec.country,
                "city": rec.city,
                "metadata": rec.extra_data or {},
                "data_type": "gpu_cluster",
            },
        }
        features.append(feature)

    return {"type": "FeatureCollection", "features": features}
|
||||
|
||||
|
||||
# ============== API Endpoints ==============
|
||||
|
||||
|
||||
@router.get("/geo/cables")
|
||||
async def get_cables_geojson(db: AsyncSession = Depends(get_db)):
|
||||
"""获取海底电缆 GeoJSON 数据 (LineString)"""
|
||||
@@ -196,6 +315,178 @@ async def get_all_geojson(db: AsyncSession = Depends(get_db)):
|
||||
}
|
||||
|
||||
|
||||
@router.get("/geo/satellites")
async def get_satellites_geojson(
    limit: int = 10000,
    db: AsyncSession = Depends(get_db),
):
    """Return satellite TLE records as a GeoJSON FeatureCollection.

    Newest records first (descending id); placeholder "Unknown" rows are
    excluded. The response adds a top-level "count" of emitted features.
    """
    query = (
        select(CollectedData)
        .where(CollectedData.source == "celestrak_tle")
        .where(CollectedData.name != "Unknown")
        .order_by(CollectedData.id.desc())
        .limit(limit)
    )
    rows = (await db.execute(query)).scalars().all()

    if not rows:
        return {"type": "FeatureCollection", "features": [], "count": 0}

    collection = convert_satellite_to_geojson(list(rows))
    return {**collection, "count": len(collection.get("features", []))}
|
||||
|
||||
|
||||
@router.get("/geo/supercomputers")
async def get_supercomputers_geojson(
    limit: int = 500,
    db: AsyncSession = Depends(get_db),
):
    """Return TOP500 supercomputer centers as a GeoJSON FeatureCollection.

    Placeholder "Unknown" rows are excluded. The response adds a top-level
    "count" of emitted features.
    """
    query = (
        select(CollectedData)
        .where(CollectedData.source == "top500")
        .where(CollectedData.name != "Unknown")
        .limit(limit)
    )
    rows = (await db.execute(query)).scalars().all()

    if not rows:
        return {"type": "FeatureCollection", "features": [], "count": 0}

    collection = convert_supercomputer_to_geojson(list(rows))
    return {**collection, "count": len(collection.get("features", []))}
|
||||
|
||||
|
||||
@router.get("/geo/gpu-clusters")
async def get_gpu_clusters_geojson(
    limit: int = 100,
    db: AsyncSession = Depends(get_db),
):
    """Return GPU cluster sites as a GeoJSON FeatureCollection.

    Placeholder "Unknown" rows are excluded. The response adds a top-level
    "count" of emitted features.
    """
    query = (
        select(CollectedData)
        .where(CollectedData.source == "epoch_ai_gpu")
        .where(CollectedData.name != "Unknown")
        .limit(limit)
    )
    rows = (await db.execute(query)).scalars().all()

    if not rows:
        return {"type": "FeatureCollection", "features": [], "count": 0}

    collection = convert_gpu_cluster_to_geojson(list(rows))
    return {**collection, "count": len(collection.get("features", []))}
|
||||
|
||||
|
||||
@router.get("/all")
async def get_all_visualization_data(db: AsyncSession = Depends(get_db)):
    """Unified endpoint returning every visualization data source at once.

    Returns GeoJSON FeatureCollections for all data types:
    - satellites: satellite TLE records
    - cables: submarine cables
    - landing_points: cable landing points
    - supercomputers: TOP500 systems
    - gpu_clusters: GPU clusters

    The response also carries a "generated_at" UTC timestamp and per-type
    feature counts under "stats".
    """

    async def fetch(source: str, named_only: bool = True):
        # One query per data source; named_only drops placeholder "Unknown"
        # rows (the cable/landing-point collectors never emit such rows, so
        # those two sources skip the filter, matching the original queries).
        stmt = select(CollectedData).where(CollectedData.source == source)
        if named_only:
            stmt = stmt.where(CollectedData.name != "Unknown")
        result = await db.execute(stmt)
        return list(result.scalars().all())

    def convert(records, converter):
        # Empty sources yield an empty FeatureCollection instead of calling
        # the converter with no rows.
        if records:
            return converter(records)
        return {"type": "FeatureCollection", "features": []}

    cables = convert(
        await fetch("arcgis_cables", named_only=False), convert_cable_to_geojson
    )
    landing_points = convert(
        await fetch("arcgis_landing_points", named_only=False),
        convert_landing_point_to_geojson,
    )
    satellites = convert(await fetch("celestrak_tle"), convert_satellite_to_geojson)
    supercomputers = convert(await fetch("top500"), convert_supercomputer_to_geojson)
    gpu_clusters = convert(await fetch("epoch_ai_gpu"), convert_gpu_cluster_to_geojson)

    collections = {
        "satellites": satellites,
        "cables": cables,
        "landing_points": landing_points,
        "supercomputers": supercomputers,
        "gpu_clusters": gpu_clusters,
    }
    counts = {key: len(fc.get("features", [])) for key, fc in collections.items()}

    return {
        "generated_at": datetime.utcnow().isoformat() + "Z",
        "version": "1.0",
        "data": collections,
        "stats": {"total_features": sum(counts.values()), **counts},
    }
|
||||
|
||||
|
||||
# Cache for cable graph
|
||||
_cable_graph: Optional[CableGraph] = None
|
||||
|
||||
|
||||
Reference in New Issue
Block a user