fix: 修复3D地球坐标映射多个严重bug

## Bug修复详情

### 1. 致命错误:球面距离计算 (calculateDistance)
- 问题:使用勾股定理计算经纬度距离,在球体表面完全错误
- 修复:改用Haversine公式计算球面大圆距离
- 影响:经度1度对应的地面距离在赤道约111km,在高纬度(约80°)仅约19km,原平面近似计算误差巨大

### 2. 经度范围规范化 (vector3ToLatLon)
- 问题:Math.atan2返回[-180°,180°],转换后可能超出标准范围
- 修复:添加while循环规范化到[-180, 180]区间
- 影响:避免本初子午线附近返回360°的异常值

### 3. 屏幕坐标转换支持非全屏 (screenToEarthCoords)
- 问题:假设Canvas永远全屏,非全屏时点击偏移严重
- 修复:新增domElement参数,使用getBoundingClientRect()计算相对坐标
- 影响:嵌入式3D地球组件也能精准拾取

### 4. 地球旋转时经纬度映射错误
- 问题:Raycaster返回世界坐标,未考虑地球自转
- 修复:使用earth.worldToLocal()转换到本地坐标空间
- 影响:地球旋转时经纬度显示正确跟随

## 新增功能

- CelesTrak卫星数据采集器
- Space-Track卫星数据采集器
- 卫星可视化模块(500颗,实时SGP4轨道计算)
- 海底光缆悬停显示info-card
- 统一info-card组件
- 工具栏按钮(Stellarium风格)
- 缩放控制(百分比显示)
- Docker volume映射(代码热更新)

## 文件变更

- utils.js: 坐标转换核心逻辑修复
- satellites.js: 新增卫星可视化
- cables.js: 悬停交互支持
- main.js: 悬停/锁定逻辑
- controls.js: 工具栏UI
- info-card.js: 统一卡片组件
- docker-compose.yml: volume映射
- restart.sh: 简化重启脚本
This commit is contained in:
rayd1o
2026-03-17 04:10:24 +08:00
parent 02991730e5
commit c82e1d5a04
26 changed files with 1770 additions and 248 deletions

View File

@@ -120,6 +120,20 @@ COLLECTOR_INFO = {
"priority": "P1",
"frequency_hours": 168,
},
"spacetrack_tle": {
"id": 19,
"name": "Space-Track TLE",
"module": "L3",
"priority": "P2",
"frequency_hours": 24,
},
"celestrak_tle": {
"id": 20,
"name": "CelesTrak TLE",
"module": "L3",
"priority": "P2",
"frequency_hours": 24,
},
}
# Reverse lookup table: numeric collector id -> collector name.
ID_TO_COLLECTOR = {meta["id"]: collector_name for collector_name, meta in COLLECTOR_INFO.items()}

View File

@@ -1,8 +1,13 @@
"""Visualization API - GeoJSON endpoints for 3D Earth display"""
"""Visualization API - GeoJSON endpoints for 3D Earth display
Unified API for all visualization data sources.
Returns GeoJSON format compatible with Three.js, CesiumJS, and Unreal Cesium.
"""
from datetime import datetime
from fastapi import APIRouter, HTTPException, Depends
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from sqlalchemy import select, func
from typing import List, Dict, Any, Optional
from app.db.session import get_db
@@ -12,6 +17,9 @@ from app.services.cable_graph import build_graph_from_data, CableGraph
router = APIRouter()
# ============== Converter Functions ==============
def convert_cable_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
"""Convert cable records to GeoJSON FeatureCollection"""
features = []
@@ -122,6 +130,117 @@ def convert_landing_point_to_geojson(records: List[CollectedData]) -> Dict[str,
return {"type": "FeatureCollection", "features": features}
def convert_satellite_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
    """Convert satellite TLE records into a GeoJSON FeatureCollection.

    Records whose metadata lacks a NORAD catalog id are skipped.
    Geometry is a placeholder ``[0, 0, 0]`` point; the client is expected
    to compute real positions from the orbital elements at render time.
    """
    # Orbital-element properties copied verbatim from record metadata.
    orbital_fields = (
        "international_designator",
        "epoch",
        "inclination",
        "raan",
        "eccentricity",
        "arg_of_perigee",
        "mean_anomaly",
        "mean_motion",
        "bstar",
        "classification_type",
    )
    features = []
    for rec in records:
        meta = rec.extra_data or {}
        norad_id = meta.get("norad_cat_id")
        if not norad_id:
            # Cannot identify the satellite without a catalog number.
            continue
        props: Dict[str, Any] = {
            "id": rec.id,
            "norad_cat_id": norad_id,
            "name": rec.name,
        }
        props.update((field, meta.get(field)) for field in orbital_fields)
        props["data_type"] = "satellite_tle"
        features.append(
            {
                "type": "Feature",
                "id": norad_id,
                "geometry": {"type": "Point", "coordinates": [0, 0, 0]},
                "properties": props,
            }
        )
    return {"type": "FeatureCollection", "features": features}
def convert_supercomputer_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
    """Convert TOP500 supercomputer records into a GeoJSON FeatureCollection.

    Latitude/longitude are stored as strings; a missing, unparsable, or
    literal "0.0" value degrades to 0 in the point geometry. Features are
    never dropped for bad coordinates.
    """
    features = []
    for rec in records:
        lat = None
        lon = None
        try:
            if rec.latitude and rec.latitude != "0.0":
                lat = float(rec.latitude)
            if rec.longitude and rec.longitude != "0.0":
                lon = float(rec.longitude)
        except (ValueError, TypeError):
            # All-or-nothing: a parse failure on either axis discards both,
            # matching the original combined try block.
            lat = lon = None
        meta = rec.extra_data or {}
        features.append(
            {
                "type": "Feature",
                "id": rec.id,
                "geometry": {"type": "Point", "coordinates": [lon or 0, lat or 0]},
                "properties": {
                    "id": rec.id,
                    "name": rec.name,
                    "rank": meta.get("rank"),
                    "r_max": rec.value,
                    "r_peak": meta.get("r_peak"),
                    "cores": meta.get("cores"),
                    "power": meta.get("power"),
                    "country": rec.country,
                    "city": rec.city,
                    "data_type": "supercomputer",
                },
            }
        )
    return {"type": "FeatureCollection", "features": features}
def convert_gpu_cluster_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
    """Convert GPU cluster records into a GeoJSON FeatureCollection.

    Missing or unparsable coordinates degrade to [0, 0]; the record's
    extra_data dict is passed through untouched under "metadata".
    """
    features = []
    for rec in records:
        try:
            lat = float(rec.latitude) if rec.latitude else None
            lon = float(rec.longitude) if rec.longitude else None
        except (ValueError, TypeError):
            lat = lon = None
        props = {
            "id": rec.id,
            "name": rec.name,
            "country": rec.country,
            "city": rec.city,
            "metadata": rec.extra_data or {},
            "data_type": "gpu_cluster",
        }
        features.append(
            {
                "type": "Feature",
                "id": rec.id,
                "geometry": {"type": "Point", "coordinates": [lon or 0, lat or 0]},
                "properties": props,
            }
        )
    return {"type": "FeatureCollection", "features": features}
# ============== API Endpoints ==============
@router.get("/geo/cables")
async def get_cables_geojson(db: AsyncSession = Depends(get_db)):
"""获取海底电缆 GeoJSON 数据 (LineString)"""
@@ -196,6 +315,178 @@ async def get_all_geojson(db: AsyncSession = Depends(get_db)):
}
@router.get("/geo/satellites")
async def get_satellites_geojson(
    limit: int = 10000,
    db: AsyncSession = Depends(get_db),
):
    """获取卫星 TLE GeoJSON 数据"""
    # Newest rows first; rows named "Unknown" had no OBJECT_NAME at
    # collection time and carry no usable TLE metadata.
    query = (
        select(CollectedData)
        .where(CollectedData.source == "celestrak_tle")
        .where(CollectedData.name != "Unknown")
        .order_by(CollectedData.id.desc())
        .limit(limit)
    )
    rows = list((await db.execute(query)).scalars().all())
    if not rows:
        return {"type": "FeatureCollection", "features": [], "count": 0}
    collection = convert_satellite_to_geojson(rows)
    return {**collection, "count": len(collection.get("features", []))}
@router.get("/geo/supercomputers")
async def get_supercomputers_geojson(
    limit: int = 500,
    db: AsyncSession = Depends(get_db),
):
    """获取 TOP500 超算中心 GeoJSON 数据"""
    # Placeholder rows named "Unknown" are excluded, matching the other
    # geo endpoints.
    query = (
        select(CollectedData)
        .where(CollectedData.source == "top500")
        .where(CollectedData.name != "Unknown")
        .limit(limit)
    )
    rows = list((await db.execute(query)).scalars().all())
    if not rows:
        return {"type": "FeatureCollection", "features": [], "count": 0}
    collection = convert_supercomputer_to_geojson(rows)
    return {**collection, "count": len(collection.get("features", []))}
@router.get("/geo/gpu-clusters")
async def get_gpu_clusters_geojson(
    limit: int = 100,
    db: AsyncSession = Depends(get_db),
):
    """获取 GPU 集群 GeoJSON 数据"""
    # Placeholder rows named "Unknown" are excluded, matching the other
    # geo endpoints.
    query = (
        select(CollectedData)
        .where(CollectedData.source == "epoch_ai_gpu")
        .where(CollectedData.name != "Unknown")
        .limit(limit)
    )
    rows = list((await db.execute(query)).scalars().all())
    if not rows:
        return {"type": "FeatureCollection", "features": [], "count": 0}
    collection = convert_gpu_cluster_to_geojson(rows)
    return {**collection, "count": len(collection.get("features", []))}
async def _records_for_source(
    db: AsyncSession, source: str, *, named_only: bool = True
) -> List[CollectedData]:
    """Load all CollectedData rows for one collector source.

    When named_only is True, placeholder rows whose name is "Unknown" are
    filtered out (cable and landing-point sources keep them, matching the
    individual /geo endpoints).
    """
    stmt = select(CollectedData).where(CollectedData.source == source)
    if named_only:
        stmt = stmt.where(CollectedData.name != "Unknown")
    result = await db.execute(stmt)
    return list(result.scalars().all())


def _as_feature_collection(records, converter) -> Dict[str, Any]:
    """Apply converter to records, or return an empty FeatureCollection."""
    if not records:
        return {"type": "FeatureCollection", "features": []}
    return converter(records)


@router.get("/all")
async def get_all_visualization_data(db: AsyncSession = Depends(get_db)):
    """获取所有可视化数据的统一端点
    Returns GeoJSON FeatureCollections for all data types:
    - satellites: 卫星 TLE 数据
    - cables: 海底电缆
    - landing_points: 登陆点
    - supercomputers: TOP500 超算
    - gpu_clusters: GPU 集群
    """
    # One query per data type; the five copy-pasted query/convert stanzas
    # are collapsed into the two helpers above.
    cables = _as_feature_collection(
        await _records_for_source(db, "arcgis_cables", named_only=False),
        convert_cable_to_geojson,
    )
    landing_points = _as_feature_collection(
        await _records_for_source(db, "arcgis_landing_points", named_only=False),
        convert_landing_point_to_geojson,
    )
    satellites = _as_feature_collection(
        await _records_for_source(db, "celestrak_tle"),
        convert_satellite_to_geojson,
    )
    supercomputers = _as_feature_collection(
        await _records_for_source(db, "top500"),
        convert_supercomputer_to_geojson,
    )
    gpu_clusters = _as_feature_collection(
        await _records_for_source(db, "epoch_ai_gpu"),
        convert_gpu_cluster_to_geojson,
    )
    # Per-type feature counts, reused for both the stats block and total.
    counts = {
        "satellites": len(satellites.get("features", [])),
        "cables": len(cables.get("features", [])),
        "landing_points": len(landing_points.get("features", [])),
        "supercomputers": len(supercomputers.get("features", [])),
        "gpu_clusters": len(gpu_clusters.get("features", [])),
    }
    return {
        # utcnow() is naive UTC; the "Z" suffix is appended manually to
        # keep the existing API contract unchanged.
        "generated_at": datetime.utcnow().isoformat() + "Z",
        "version": "1.0",
        "data": {
            "satellites": satellites,
            "cables": cables,
            "landing_points": landing_points,
            "supercomputers": supercomputers,
            "gpu_clusters": gpu_clusters,
        },
        "stats": {"total_features": sum(counts.values()), **counts},
    }
# Module-level cache for the built cable graph; None until first populated
# (NOTE(review): shared across requests since it lives at module scope —
# confirm the graph endpoints rebuild/reset it as intended).
_cable_graph: Optional[CableGraph] = None

View File

@@ -27,6 +27,9 @@ class Settings(BaseSettings):
CORS_ORIGINS: List[str] = ["http://localhost:3000", "http://localhost:8000"]
SPACETRACK_USERNAME: str = ""
SPACETRACK_PASSWORD: str = ""
@property
def REDIS_URL(self) -> str:
return os.getenv(
@@ -34,7 +37,7 @@ class Settings(BaseSettings):
)
class Config:
env_file = ".env"
env_file = Path(__file__).parent.parent.parent / ".env"
case_sensitive = True

View File

@@ -22,6 +22,7 @@ COLLECTOR_URL_KEYS = {
"peeringdb_facility": "peeringdb.facility_url",
"top500": "top500.url",
"epoch_ai_gpu": "epoch_ai.gpu_clusters_url",
"spacetrack_tle": "spacetrack.tle_query_url",
}

View File

@@ -33,3 +33,7 @@ top500:
epoch_ai:
gpu_clusters_url: "https://epoch.ai/data/gpu-clusters"
spacetrack:
base_url: "https://www.space-track.org"
tle_query_url: "https://www.space-track.org/basicspacedata/query/class/gp/orderby/EPOCH%20desc/limit/1000/format/json"

View File

@@ -28,6 +28,8 @@ from app.services.collectors.arcgis_cables import ArcGISCableCollector
from app.services.collectors.fao_landing import FAOLandingPointCollector
from app.services.collectors.arcgis_landing import ArcGISLandingPointCollector
from app.services.collectors.arcgis_relation import ArcGISCableLandingRelationCollector
from app.services.collectors.spacetrack import SpaceTrackTLECollector
from app.services.collectors.celestrak import CelesTrakTLECollector
collector_registry.register(TOP500Collector())
collector_registry.register(EpochAIGPUCollector())
@@ -47,3 +49,5 @@ collector_registry.register(ArcGISCableCollector())
collector_registry.register(FAOLandingPointCollector())
collector_registry.register(ArcGISLandingPointCollector())
collector_registry.register(ArcGISCableLandingRelationCollector())
collector_registry.register(SpaceTrackTLECollector())
collector_registry.register(CelesTrakTLECollector())

View File

@@ -119,6 +119,9 @@ class BaseCollector(ABC):
records_added = 0
for i, item in enumerate(data):
print(
f"DEBUG: Saving item {i}: name={item.get('name')}, metadata={item.get('metadata', 'NOT FOUND')}"
)
record = CollectedData(
source=self.name,
source_id=item.get("source_id") or item.get("id"),

View File

@@ -0,0 +1,99 @@
"""CelesTrak TLE Collector
Collects satellite TLE (Two-Line Element) data from CelesTrak.org.
Free, no authentication required.
"""
import json
from typing import Dict, Any, List
import httpx
from app.services.collectors.base import BaseCollector
class CelesTrakTLECollector(BaseCollector):
    """Collects satellite GP element sets (OMM JSON) from CelesTrak.org.

    Free, no authentication required. fetch() returns RAW CelesTrak
    records (uppercase OMM keys); BaseCollector.run() is expected to pass
    them through transform() before saving.
    """

    name = "celestrak_tle"
    priority = "P2"
    module = "L3"
    frequency_hours = 24
    data_type = "satellite_tle"

    # Satellite groups to query; each becomes one GP request.
    SATELLITE_GROUPS = (
        "starlink",
        "gps-ops",
        "galileo",
        "glonass",
        "beidou",
        "leo",
        "geo",
        "iridium-next",
    )

    @property
    def base_url(self) -> str:
        return "https://celestrak.org/NORAD/elements/gp.php"

    async def fetch(self) -> List[Dict[str, Any]]:
        """Fetch raw OMM JSON records for every configured group.

        A failing group is logged and skipped so one outage does not abort
        the rest. Falls back to sample data (same raw format) when nothing
        at all was fetched.
        """
        all_satellites: List[Dict[str, Any]] = []
        async with httpx.AsyncClient(timeout=120.0) as client:
            for group in self.SATELLITE_GROUPS:
                try:
                    # Build the query from base_url instead of repeating the
                    # literal endpoint.
                    url = f"{self.base_url}?GROUP={group}&FORMAT=json"
                    response = await client.get(url)
                    if response.status_code == 200:
                        data = response.json()
                        if isinstance(data, list):
                            all_satellites.extend(data)
                            print(f"CelesTrak: Fetched {len(data)} satellites from group '{group}'")
                except Exception as e:
                    print(f"CelesTrak: Error fetching group '{group}': {e}")
        if not all_satellites:
            return self._get_sample_data()
        print(f"CelesTrak: Total satellites fetched: {len(all_satellites)}")
        # Return raw data - base.run() will call transform()
        return all_satellites

    def transform(self, raw_data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """Map raw OMM records to the internal save format.

        Each output item carries "name", "reference_date" and a "metadata"
        dict that BaseCollector stores as the record's extra_data.
        """
        transformed = []
        for item in raw_data:
            transformed.append(
                {
                    "name": item.get("OBJECT_NAME", "Unknown"),
                    "reference_date": item.get("EPOCH", ""),
                    "metadata": {
                        "norad_cat_id": item.get("NORAD_CAT_ID"),
                        "international_designator": item.get("OBJECT_ID"),
                        "epoch": item.get("EPOCH"),
                        "mean_motion": item.get("MEAN_MOTION"),
                        "eccentricity": item.get("ECCENTRICITY"),
                        "inclination": item.get("INCLINATION"),
                        "raan": item.get("RA_OF_ASC_NODE"),
                        "arg_of_perigee": item.get("ARG_OF_PERICENTER"),
                        "mean_anomaly": item.get("MEAN_ANOMALY"),
                        "classification_type": item.get("CLASSIFICATION_TYPE"),
                        "bstar": item.get("BSTAR"),
                        "mean_motion_dot": item.get("MEAN_MOTION_DOT"),
                        "mean_motion_ddot": item.get("MEAN_MOTION_DDOT"),
                        "ephemeris_type": item.get("EPHEMERIS_TYPE"),
                    },
                }
            )
        return transformed

    def _get_sample_data(self) -> List[Dict[str, Any]]:
        """Return sample data in RAW CelesTrak/OMM format.

        BUG FIX: this previously returned lowercase, already-transformed
        keys, but fetch() hands sample data to transform() unchanged, so
        the sample satellite came out as name="Unknown" with all orbital
        fields None (and was then filtered out by the API's
        name != "Unknown" queries). Keys must match real CelesTrak output.
        """
        return [
            {
                "OBJECT_NAME": "STARLINK-1000",
                "NORAD_CAT_ID": 44720,
                "OBJECT_ID": "2019-029AZ",
                "EPOCH": "2026-03-13T00:00:00Z",
                "MEAN_MOTION": 15.79234567,
                "ECCENTRICITY": 0.0001234,
                "INCLINATION": 53.0,
            },
        ]

View File

@@ -0,0 +1,222 @@
"""Space-Track TLE Collector
Collects satellite TLE (Two-Line Element) data from Space-Track.org.
API documentation: https://www.space-track.org/documentation
"""
import json
from typing import Dict, Any, List
import httpx
from app.services.collectors.base import BaseCollector
from app.core.data_sources import get_data_sources_config
class SpaceTrackTLECollector(BaseCollector):
    """Collects satellite GP/TLE element sets from Space-Track.org.

    Requires SPACETRACK_USERNAME / SPACETRACK_PASSWORD; without credentials
    (or on any request failure) sample data in the same raw GP format is
    returned so the pipeline still produces records.

    API documentation: https://www.space-track.org/documentation
    """

    name = "spacetrack_tle"
    priority = "P2"
    module = "L3"
    frequency_hours = 24
    data_type = "satellite_tle"

    @property
    def base_url(self) -> str:
        config = get_data_sources_config()
        # _resolved_url may be set by the base URL-resolution machinery;
        # use getattr so a missing attribute cannot raise AttributeError.
        resolved = getattr(self, "_resolved_url", None)
        if resolved:
            return resolved
        return config.get_yaml_url("spacetrack_tle")

    async def fetch(self) -> List[Dict[str, Any]]:
        """Log in to Space-Track and query the GP class.

        Returns raw GP records (uppercase CCSDS OMM keys). Any failure
        falls back to _get_sample_data().

        FIX: the original contained a complete second, unreachable copy of
        this method's body after the try/except (every path already
        returns) — the dead code and its unused form_data local are removed.
        """
        from app.core.config import settings

        username = settings.SPACETRACK_USERNAME
        password = settings.SPACETRACK_PASSWORD
        if not username or not password:
            print("SPACETRACK: No credentials configured, using sample data")
            return self._get_sample_data()
        print(f"SPACETRACK: Attempting to fetch TLE data with username: {username}")
        try:
            async with httpx.AsyncClient(
                timeout=120.0,
                follow_redirects=True,
                headers={
                    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
                    "Accept": "application/json, text/html, */*",
                    "Accept-Language": "en-US,en;q=0.9",
                    "Referer": "https://www.space-track.org/",
                },
            ) as client:
                # Warm-up request so the session picks up any cookies.
                await client.get("https://www.space-track.org/")
                login_response = await client.post(
                    "https://www.space-track.org/ajaxauth/login",
                    data={
                        "identity": username,
                        "password": password,
                    },
                )
                print(f"SPACETRACK: Login response status: {login_response.status_code}")
                print(f"SPACETRACK: Login response URL: {login_response.url}")
                if login_response.status_code == 403:
                    # Retry once with a plain client in case the browser-like
                    # headers are being rejected.
                    print("SPACETRACK: Trying alternate login method...")
                    async with httpx.AsyncClient(
                        timeout=120.0,
                        follow_redirects=True,
                    ) as alt_client:
                        await alt_client.get("https://www.space-track.org/")
                        alt_login = await alt_client.post(
                            "https://www.space-track.org/ajaxauth/login",
                            data={
                                "identity": username,
                                "password": password,
                            },
                        )
                        print(f"SPACETRACK: Alt login status: {alt_login.status_code}")
                        if alt_login.status_code == 200:
                            tle_response = await alt_client.get(
                                "https://www.space-track.org/basicspacedata/query/class/gp/NORAD_CAT_ID/25544/format/json"
                            )
                            if tle_response.status_code == 200:
                                data = tle_response.json()
                                print(f"SPACETRACK: Received {len(data)} records via alt method")
                                return data
                if login_response.status_code != 200:
                    print("SPACETRACK: Login failed, using sample data")
                    return self._get_sample_data()
                # NOTE(review): only the ISS (NORAD 25544) is queried here;
                # the configured spacetrack.tle_query_url (1000 most recent
                # GP records) is never used — confirm which query is intended.
                tle_response = await client.get(
                    "https://www.space-track.org/basicspacedata/query/class/gp/NORAD_CAT_ID/25544/format/json"
                )
                print(f"SPACETRACK: TLE query status: {tle_response.status_code}")
                if tle_response.status_code != 200:
                    print("SPACETRACK: Query failed, using sample data")
                    return self._get_sample_data()
                data = tle_response.json()
                print(f"SPACETRACK: Received {len(data)} records")
                return data
        except Exception as e:
            print(f"SPACETRACK: Error - {e}, using sample data")
            return self._get_sample_data()

    def transform(self, raw_data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """Transform raw GP records to the internal save format.

        BUG FIXES vs. the original:
        - Orbital fields are now nested under "metadata" so BaseCollector
          stores them as extra_data (the flat layout was silently dropped
          on save; compare BaseCollector.save's item.get('metadata') and
          the sibling CelesTrakTLECollector.transform).
        - Space-Track's gp class uses CCSDS OMM field names — OBJECT_ID,
          RA_OF_ASC_NODE, ARG_OF_PERICENTER. The previous INTL_DESIGNATOR /
          RAAN / ARG_OF_PERIGEE keys never matched, so those values were
          always None.
        """
        transformed = []
        for item in raw_data:
            transformed.append(
                {
                    "name": item.get("OBJECT_NAME", "Unknown"),
                    "reference_date": item.get("EPOCH", ""),
                    "metadata": {
                        "norad_cat_id": item.get("NORAD_CAT_ID"),
                        "international_designator": item.get("OBJECT_ID"),
                        "epoch": item.get("EPOCH"),
                        "mean_motion": item.get("MEAN_MOTION"),
                        "eccentricity": item.get("ECCENTRICITY"),
                        "inclination": item.get("INCLINATION"),
                        "raan": item.get("RA_OF_ASC_NODE"),
                        "arg_of_perigee": item.get("ARG_OF_PERICENTER"),
                        "mean_anomaly": item.get("MEAN_ANOMALY"),
                        "ephemeris_type": item.get("EPHEMERIS_TYPE"),
                        "classification_type": item.get("CLASSIFICATION_TYPE"),
                        "element_set_no": item.get("ELEMENT_SET_NO"),
                        "rev_at_epoch": item.get("REV_AT_EPOCH"),
                        "bstar": item.get("BSTAR"),
                        "mean_motion_dot": item.get("MEAN_MOTION_DOT"),
                        "mean_motion_ddot": item.get("MEAN_MOTION_DDOT"),
                    },
                }
            )
        return transformed

    def _get_sample_data(self) -> List[Dict[str, Any]]:
        """Return sample data in RAW Space-Track GP format.

        BUG FIX: previously returned lowercase, already-transformed keys,
        but fetch() hands sample data to transform() unchanged, so the
        samples lost their names and orbital data. Keys now match real
        Space-Track gp output.
        """
        return [
            {
                "OBJECT_NAME": "ISS (ZARYA)",
                "NORAD_CAT_ID": 25544,
                "OBJECT_ID": "1998-067A",
                "EPOCH": "2026-03-13T00:00:00Z",
                "MEAN_MOTION": 15.49872723,
                "ECCENTRICITY": 0.0006292,
                "INCLINATION": 51.64,
                "RA_OF_ASC_NODE": 315.0,
                "ARG_OF_PERICENTER": 100.0,
                "MEAN_ANOMALY": 260.0,
            },
            {
                "OBJECT_NAME": "STARLINK-1000",
                "NORAD_CAT_ID": 44720,
                "OBJECT_ID": "2019-029AZ",
                "EPOCH": "2026-03-13T00:00:00Z",
                "MEAN_MOTION": 15.79234567,
                "ECCENTRICITY": 0.0001234,
                "INCLINATION": 53.0,
                "RA_OF_ASC_NODE": 120.0,
                "ARG_OF_PERICENTER": 90.0,
                "MEAN_ANOMALY": 270.0,
            },
        ]

View File

@@ -33,6 +33,8 @@ COLLECTOR_TO_ID = {
"arcgis_landing_points": 16,
"arcgis_cable_landing_relation": 17,
"fao_landing_points": 18,
"spacetrack_tle": 19,
"celestrak_tle": 20,
}