feat(backend): Add cable graph service and data collectors

## Changelog

### New Features

#### Cable Graph Service
- Add cable_graph.py for finding shortest path between landing points
- Implement haversine distance calculation for great circle distances
- Support for dateline crossing (longitude normalization)
- NetworkX-based graph for optimal path finding

#### Data Collectors
- Add ArcGISCableCollector for fetching submarine cable data from ArcGIS GeoJSON API
- Add FAOLandingPointCollector for fetching landing point data from FAO CSV API

### Backend Changes

#### API Updates
- auth.py: Return a null `expires_in` when token expiry is disabled (ACCESS_TOKEN_EXPIRE_MINUTES <= 0)
- datasources.py: Add datasource endpoints and management
- visualization.py: Add visualization API endpoints
- config.py: Default ACCESS_TOKEN_EXPIRE_MINUTES and REFRESH_TOKEN_EXPIRE_DAYS to 0 (tokens never expire by default)
- security.py: Improve security settings

#### Models & Schemas
- task.py: Add `total_records` and `progress` columns to CollectionTask for progress tracking
- token.py: Update token schema

#### Services
- collectors/base.py: Improve base collector with better error handling
- collectors/__init__.py: Register new collectors
- scheduler.py: Update scheduler logic
- tasks/scheduler.py: Add task scheduling

### Frontend Changes
- AppLayout.tsx: Improve layout component
- index.css: Add global styles
- DataSources.tsx: Enhance data sources management page
- vite.config.ts: Add Vite configuration for earth module
This commit is contained in:
rayd1o
2026-03-11 16:38:49 +08:00
parent 6cb4398f3a
commit aaae6a53c3
18 changed files with 990 additions and 146 deletions

View File

@@ -61,10 +61,14 @@ async def login(
access_token = create_access_token(data={"sub": user.id}) access_token = create_access_token(data={"sub": user.id})
refresh_token = create_refresh_token(data={"sub": user.id}) refresh_token = create_refresh_token(data={"sub": user.id})
expires_in = None
if settings.ACCESS_TOKEN_EXPIRE_MINUTES > 0:
expires_in = settings.ACCESS_TOKEN_EXPIRE_MINUTES * 60
return { return {
"access_token": access_token, "access_token": access_token,
"token_type": "bearer", "token_type": "bearer",
"expires_in": settings.ACCESS_TOKEN_EXPIRE_MINUTES * 60, "expires_in": expires_in,
"user": { "user": {
"id": user.id, "id": user.id,
"username": user.username, "username": user.username,
@@ -79,10 +83,14 @@ async def refresh_token(
): ):
access_token = create_access_token(data={"sub": current_user.id}) access_token = create_access_token(data={"sub": current_user.id})
expires_in = None
if settings.ACCESS_TOKEN_EXPIRE_MINUTES > 0:
expires_in = settings.ACCESS_TOKEN_EXPIRE_MINUTES * 60
return { return {
"access_token": access_token, "access_token": access_token,
"token_type": "bearer", "token_type": "bearer",
"expires_in": settings.ACCESS_TOKEN_EXPIRE_MINUTES * 60, "expires_in": expires_in,
"user": { "user": {
"id": current_user.id, "id": current_user.id,
"username": current_user.username, "username": current_user.username,

View File

@@ -7,6 +7,8 @@ from sqlalchemy.ext.asyncio import AsyncSession
from app.db.session import get_db from app.db.session import get_db
from app.models.user import User from app.models.user import User
from app.models.datasource import DataSource from app.models.datasource import DataSource
from app.models.task import CollectionTask
from app.models.collected_data import CollectedData
from app.core.security import get_current_user from app.core.security import get_current_user
from app.services.collectors.registry import collector_registry from app.services.collectors.registry import collector_registry
@@ -90,6 +92,20 @@ COLLECTOR_INFO = {
"priority": "P2", "priority": "P2",
"frequency_hours": 168, "frequency_hours": 168,
}, },
"arcgis_cables": {
"id": 15,
"name": "ArcGIS Submarine Cables",
"module": "L2",
"priority": "P1",
"frequency_hours": 168,
},
"fao_landing_points": {
"id": 16,
"name": "FAO Landing Points",
"module": "L2",
"priority": "P1",
"frequency_hours": 168,
},
} }
ID_TO_COLLECTOR = {info["id"]: name for name, info in COLLECTOR_INFO.items()} ID_TO_COLLECTOR = {info["id"]: name for name, info in COLLECTOR_INFO.items()}
@@ -135,6 +151,35 @@ async def list_datasources(
collector_list = [] collector_list = []
for name, info in COLLECTOR_INFO.items(): for name, info in COLLECTOR_INFO.items():
is_active_status = collector_registry.is_active(name) is_active_status = collector_registry.is_active(name)
running_task_query = (
select(CollectionTask)
.where(CollectionTask.datasource_id == info["id"])
.where(CollectionTask.status == "running")
.order_by(CollectionTask.started_at.desc())
.limit(1)
)
running_result = await db.execute(running_task_query)
running_task = running_result.scalar_one_or_none()
last_run_query = (
select(CollectionTask)
.where(CollectionTask.datasource_id == info["id"])
.where(CollectionTask.completed_at.isnot(None))
.order_by(CollectionTask.completed_at.desc())
.limit(1)
)
last_run_result = await db.execute(last_run_query)
last_task = last_run_result.scalar_one_or_none()
data_count_query = select(func.count(CollectedData.id)).where(CollectedData.source == name)
data_count_result = await db.execute(data_count_query)
data_count = data_count_result.scalar() or 0
last_run = None
if last_task and last_task.completed_at and data_count > 0:
last_run = last_task.completed_at.strftime("%Y-%m-%d %H:%M")
collector_list.append( collector_list.append(
{ {
"id": info["id"], "id": info["id"],
@@ -144,6 +189,12 @@ async def list_datasources(
"frequency": f"{info['frequency_hours']}h", "frequency": f"{info['frequency_hours']}h",
"is_active": is_active_status, "is_active": is_active_status,
"collector_class": name, "collector_class": name,
"last_run": last_run,
"is_running": running_task is not None,
"task_id": running_task.id if running_task else None,
"progress": running_task.progress if running_task else None,
"records_processed": running_task.records_processed if running_task else None,
"total_records": running_task.total_records if running_task else None,
} }
) )
@@ -215,8 +266,15 @@ async def get_datasource_stats(
raise HTTPException(status_code=404, detail="Data source not found") raise HTTPException(status_code=404, detail="Data source not found")
info = COLLECTOR_INFO[collector_name] info = COLLECTOR_INFO[collector_name]
total_query = select(func.count(DataSource.id)).where(DataSource.source == info["name"]) source_name = info["name"]
result = await db.execute(total_query)
query = select(func.count(CollectedData.id)).where(CollectedData.source == collector_name)
result = await db.execute(query)
total = result.scalar() or 0
if total == 0:
query = select(func.count(CollectedData.id)).where(CollectedData.source == source_name)
result = await db.execute(query)
total = result.scalar() or 0 total = result.scalar() or 0
return { return {
@@ -224,7 +282,6 @@ async def get_datasource_stats(
"collector_name": collector_name, "collector_name": collector_name,
"name": info["name"], "name": info["name"],
"total_records": total, "total_records": total,
"last_updated": datetime.utcnow().isoformat(),
} }
@@ -256,3 +313,80 @@ async def trigger_datasource(
status_code=500, status_code=500,
detail=f"Failed to trigger collector '{collector_name}'", detail=f"Failed to trigger collector '{collector_name}'",
) )
@router.delete("/{source_id}/data")
async def clear_datasource_data(
    source_id: str,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Delete all collected records for a data source.

    Records may have been stored under either the collector name (e.g.
    "arcgis_cables") or the human-readable display name (older runs);
    whichever key actually has data is the one cleared.

    Raises:
        HTTPException: 404 if *source_id* does not map to a known collector.
    """
    collector_name = get_collector_name(source_id)
    if not collector_name:
        raise HTTPException(status_code=404, detail="Data source not found")
    info = COLLECTOR_INFO[collector_name]
    source_name = info["name"]
    query = select(func.count(CollectedData.id)).where(CollectedData.source == collector_name)
    result = await db.execute(query)
    count = result.scalar() or 0
    if count == 0:
        # Fall back to the display name used by older collector runs.
        query = select(func.count(CollectedData.id)).where(CollectedData.source == source_name)
        result = await db.execute(query)
        count = result.scalar() or 0
        delete_source = source_name
    else:
        delete_source = collector_name
    if count == 0:
        return {
            "status": "success",
            "message": "No data to clear",
            "deleted_count": 0,
        }
    delete_query = CollectedData.__table__.delete().where(CollectedData.source == delete_source)
    delete_result = await db.execute(delete_query)
    await db.commit()
    # Prefer the number of rows the DELETE actually removed over the earlier
    # COUNT: data may change between the two statements, and the pre-count
    # would then misreport. Some drivers return -1/None for rowcount, in
    # which case we keep the pre-count as a best effort.
    rowcount = delete_result.rowcount
    deleted = rowcount if rowcount is not None and rowcount >= 0 else count
    return {
        "status": "success",
        "message": f"Cleared {deleted} records for data source '{info['name']}'",
        "deleted_count": deleted,
    }
@router.get("/{source_id}/task-status")
async def get_task_status(
    source_id: str,
    db: AsyncSession = Depends(get_db),
):
    """Report whether a collection task is currently running for a data source.

    Returns ``is_running: False`` with null task fields when no task with
    status "running" exists; otherwise the most recently started running
    task's id, progress and record counters.
    """
    collector_name = get_collector_name(source_id)
    if not collector_name:
        raise HTTPException(status_code=404, detail="Data source not found")
    datasource_id = COLLECTOR_INFO[collector_name]["id"]
    stmt = (
        select(CollectionTask)
        .where(CollectionTask.datasource_id == datasource_id)
        .where(CollectionTask.status == "running")
        .order_by(CollectionTask.started_at.desc())
        .limit(1)
    )
    task = (await db.execute(stmt)).scalar_one_or_none()
    if task is None:
        return {"is_running": False, "task_id": None, "progress": None}
    return {
        "is_running": True,
        "task_id": task.id,
        "progress": task.progress,
        "records_processed": task.records_processed,
        "total_records": task.total_records,
        "status": task.status,
    }

View File

@@ -1,75 +1,189 @@
"""Visualization API - GeoJSON endpoints for 3D Earth display""" """Visualization API - GeoJSON endpoints for 3D Earth display"""
from fastapi import APIRouter, HTTPException from fastapi import APIRouter, HTTPException, Depends
import httpx from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from typing import List, Dict, Any, Optional
from app.db.session import get_db
from app.models.collected_data import CollectedData
from app.services.cable_graph import build_graph_from_data, CableGraph
router = APIRouter() router = APIRouter()
CABLE_DATA_URL = "https://services.arcgis.com/6DIQcwlPy8knb6sg/arcgis/rest/services/SubmarineCables/FeatureServer/2/query?where=1%3D1&outFields=*&returnGeometry=true&f=geojson"
LANDING_POINT_CSV_URL = "https://data.apps.fao.org/catalog/dataset/1b75ff21-92f2-4b96-9b7b-98e8aa65ad5d/resource/b6071077-d1d4-4e97-aa00-42e902847c87/download/landing-point-geo.csv"
@router.get("/geo/cables")
async def get_cables_geojson():
"""获取海底电缆 GeoJSON 数据 (LineString)"""
try:
async with httpx.AsyncClient(timeout=60.0) as client:
response = await client.get(CABLE_DATA_URL)
response.raise_for_status()
return response.json()
except httpx.HTTPError as e:
raise HTTPException(status_code=502, detail=f"Failed to fetch cable data: {str(e)}")
except Exception as e:
raise HTTPException(status_code=500, detail=f"Internal error: {str(e)}")
@router.get("/geo/landing-points")
async def get_landing_points_geojson():
"""获取登陆点 GeoJSON 数据 (Point)"""
try:
async with httpx.AsyncClient(timeout=60.0) as client:
response = await client.get(LANDING_POINT_CSV_URL)
response.raise_for_status()
lines = response.text.strip().split("\n")
if not lines:
raise HTTPException(status_code=500, detail="Empty CSV data")
def convert_cable_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
"""Convert cable records to GeoJSON FeatureCollection"""
features = [] features = []
for line in lines[1:]:
if not line.strip(): for record in records:
metadata = record.extra_data or {}
route_coords = metadata.get("route_coordinates", [])
if not route_coords:
continue continue
parts = line.split(",")
if len(parts) >= 4: all_lines = []
# Handle both old format (flat array) and new format (array of arrays)
if route_coords and isinstance(route_coords[0], list):
# New format: array of arrays (MultiLineString structure)
if route_coords and isinstance(route_coords[0][0], list):
# Array of arrays of arrays - multiple lines
for line in route_coords:
line_coords = []
for point in line:
if len(point) >= 2:
try: try:
lon = float(parts[0]) lon = float(point[0])
lat = float(parts[1]) lat = float(point[1])
feature_id = parts[2] line_coords.append([lon, lat])
name = parts[3].strip('"') except (ValueError, TypeError):
is_tbd = parts[4].strip() == "true" if len(parts) > 4 else False continue
if len(line_coords) >= 2:
all_lines.append(line_coords)
else:
# Old format: flat array of points - treat as single line
line_coords = []
for point in route_coords:
if len(point) >= 2:
try:
lon = float(point[0])
lat = float(point[1])
line_coords.append([lon, lat])
except (ValueError, TypeError):
continue
if len(line_coords) >= 2:
all_lines.append(line_coords)
if not all_lines:
continue
# Use MultiLineString format to preserve cable segments
features.append(
{
"type": "Feature",
"geometry": {"type": "MultiLineString", "coordinates": all_lines},
"properties": {
"id": record.id,
"source_id": record.source_id,
"Name": record.name,
"name": record.name,
"owner": metadata.get("owners"),
"owners": metadata.get("owners"),
"rfs": metadata.get("rfs"),
"RFS": metadata.get("rfs"),
"status": metadata.get("status", "active"),
"length": record.value,
"length_km": record.value,
"SHAPE__Length": record.value,
"url": metadata.get("url"),
"color": metadata.get("color"),
"year": metadata.get("year"),
},
}
)
return {"type": "FeatureCollection", "features": features}
def convert_landing_point_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
"""Convert landing point records to GeoJSON FeatureCollection"""
features = []
for record in records:
try:
lat = float(record.latitude) if record.latitude else None
lon = float(record.longitude) if record.longitude else None
except (ValueError, TypeError):
continue
if lat is None or lon is None:
continue
metadata = record.extra_data or {}
features.append( features.append(
{ {
"type": "Feature", "type": "Feature",
"geometry": {"type": "Point", "coordinates": [lon, lat]}, "geometry": {"type": "Point", "coordinates": [lon, lat]},
"properties": {"id": feature_id, "name": name, "is_tbd": is_tbd}, "properties": {
"id": record.id,
"source_id": record.source_id,
"name": record.name,
"country": record.country,
"city": record.city,
"is_tbd": metadata.get("is_tbd", False),
},
} }
) )
except (ValueError, IndexError):
continue
return {"type": "FeatureCollection", "features": features} return {"type": "FeatureCollection", "features": features}
except httpx.HTTPError as e:
raise HTTPException(status_code=502, detail=f"Failed to fetch landing point data: {str(e)}")
@router.get("/geo/cables")
async def get_cables_geojson(db: AsyncSession = Depends(get_db)):
"""获取海底电缆 GeoJSON 数据 (LineString)"""
try:
stmt = select(CollectedData).where(CollectedData.source == "arcgis_cables")
result = await db.execute(stmt)
records = result.scalars().all()
if not records:
raise HTTPException(
status_code=404,
detail="No cable data found. Please run the arcgis_cables collector first.",
)
return convert_cable_to_geojson(records)
except HTTPException:
raise
except Exception as e:
raise HTTPException(status_code=500, detail=f"Internal error: {str(e)}")
@router.get("/geo/landing-points")
async def get_landing_points_geojson(db: AsyncSession = Depends(get_db)):
"""获取登陆点 GeoJSON 数据 (Point)"""
try:
stmt = select(CollectedData).where(CollectedData.source == "fao_landing_points")
result = await db.execute(stmt)
records = result.scalars().all()
if not records:
raise HTTPException(
status_code=404,
detail="No landing point data found. Please run the fao_landing_points collector first.",
)
return convert_landing_point_to_geojson(records)
except HTTPException:
raise
except Exception as e: except Exception as e:
raise HTTPException(status_code=500, detail=f"Internal error: {str(e)}") raise HTTPException(status_code=500, detail=f"Internal error: {str(e)}")
@router.get("/geo/all") @router.get("/geo/all")
async def get_all_geojson(): async def get_all_geojson(db: AsyncSession = Depends(get_db)):
"""获取所有可视化数据 (电缆 + 登陆点)""" """获取所有可视化数据 (电缆 + 登陆点)"""
cables = await get_cables_geojson() cables_stmt = select(CollectedData).where(CollectedData.source == "arcgis_cables")
points = await get_landing_points_geojson() cables_result = await db.execute(cables_stmt)
cables_records = cables_result.scalars().all()
points_stmt = select(CollectedData).where(CollectedData.source == "fao_landing_points")
points_result = await db.execute(points_stmt)
points_records = points_result.scalars().all()
cables = (
convert_cable_to_geojson(cables_records)
if cables_records
else {"type": "FeatureCollection", "features": []}
)
points = (
convert_landing_point_to_geojson(points_records)
if points_records
else {"type": "FeatureCollection", "features": []}
)
return { return {
"cables": cables, "cables": cables,
@@ -79,3 +193,52 @@ async def get_all_geojson():
"landing_point_count": len(points.get("features", [])) if points else 0, "landing_point_count": len(points.get("features", [])) if points else 0,
}, },
} }
# Process-lifetime cache for the cable graph (built lazily on first use).
_cable_graph: Optional[CableGraph] = None


async def get_cable_graph(db: AsyncSession) -> CableGraph:
    """Get or build the cable graph, caching it after the first build.

    The graph is only cached once it contains at least one node: a request
    served before the collectors have populated the database would otherwise
    pin an empty graph for the lifetime of the process, and every later
    path query would fail even after data arrives.
    """
    global _cable_graph
    if _cable_graph is None:
        cables_stmt = select(CollectedData).where(CollectedData.source == "arcgis_cables")
        cables_result = await db.execute(cables_stmt)
        cables_records = list(cables_result.scalars().all())
        points_stmt = select(CollectedData).where(CollectedData.source == "fao_landing_points")
        points_result = await db.execute(points_stmt)
        points_records = list(points_result.scalars().all())
        cables_data = convert_cable_to_geojson(cables_records)
        points_data = convert_landing_point_to_geojson(points_records)
        graph = build_graph_from_data(cables_data, points_data)
        if not graph.graph.nodes():
            # No data collected yet: serve the empty graph but don't cache it.
            return graph
        _cable_graph = graph
    return _cable_graph
@router.post("/geo/path")
async def find_path(
    start: List[float],
    end: List[float],
    db: AsyncSession = Depends(get_db),
):
    """Find shortest path between two coordinates via cable network"""
    # Both endpoints are validated identically: each must be a [lon, lat] pair.
    for label, coords in (("Start", start), ("End", end)):
        if not coords or len(coords) != 2:
            raise HTTPException(status_code=400, detail=f"{label} must be [lon, lat]")
    graph = await get_cable_graph(db)
    path_result = graph.find_shortest_path(start, end)
    if path_result:
        return path_result
    raise HTTPException(
        status_code=404,
        detail="No path found between these points. They may be too far from any landing point.",
    )

View File

@@ -12,8 +12,8 @@ class Settings(BaseSettings):
API_V1_STR: str = "/api/v1" API_V1_STR: str = "/api/v1"
SECRET_KEY: str = "your-secret-key-change-in-production" SECRET_KEY: str = "your-secret-key-change-in-production"
ALGORITHM: str = "HS256" ALGORITHM: str = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES: int = 15 ACCESS_TOKEN_EXPIRE_MINUTES: int = 0
REFRESH_TOKEN_EXPIRE_DAYS: int = 7 REFRESH_TOKEN_EXPIRE_DAYS: int = 0
POSTGRES_SERVER: str = "localhost" POSTGRES_SERVER: str = "localhost"
POSTGRES_USER: str = "postgres" POSTGRES_USER: str = "postgres"

View File

@@ -50,9 +50,13 @@ def create_access_token(data: dict, expires_delta: Optional[timedelta] = None) -
to_encode = data.copy() to_encode = data.copy()
if expires_delta: if expires_delta:
expire = datetime.utcnow() + expires_delta expire = datetime.utcnow() + expires_delta
else: elif settings.ACCESS_TOKEN_EXPIRE_MINUTES > 0:
expire = datetime.utcnow() + timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES) expire = datetime.utcnow() + timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
to_encode.update({"exp": expire, "type": "access"}) else:
expire = None
if expire:
to_encode.update({"exp": expire})
to_encode.update({"type": "access"})
if "sub" in to_encode: if "sub" in to_encode:
to_encode["sub"] = str(to_encode["sub"]) to_encode["sub"] = str(to_encode["sub"])
return jwt.encode(to_encode, settings.SECRET_KEY, algorithm=settings.ALGORITHM) return jwt.encode(to_encode, settings.SECRET_KEY, algorithm=settings.ALGORITHM)
@@ -60,8 +64,10 @@ def create_access_token(data: dict, expires_delta: Optional[timedelta] = None) -
def create_refresh_token(data: dict) -> str: def create_refresh_token(data: dict) -> str:
to_encode = data.copy() to_encode = data.copy()
if settings.REFRESH_TOKEN_EXPIRE_DAYS > 0:
expire = datetime.utcnow() + timedelta(days=settings.REFRESH_TOKEN_EXPIRE_DAYS) expire = datetime.utcnow() + timedelta(days=settings.REFRESH_TOKEN_EXPIRE_DAYS)
to_encode.update({"exp": expire, "type": "refresh"}) to_encode.update({"exp": expire})
to_encode.update({"type": "refresh"})
if "sub" in to_encode: if "sub" in to_encode:
to_encode["sub"] = str(to_encode["sub"]) to_encode["sub"] = str(to_encode["sub"])
return jwt.encode(to_encode, settings.SECRET_KEY, algorithm=settings.ALGORITHM) return jwt.encode(to_encode, settings.SECRET_KEY, algorithm=settings.ALGORITHM)

View File

@@ -1,6 +1,6 @@
"""Collection Task model""" """Collection Task model"""
from sqlalchemy import Column, DateTime, Integer, String, Text from sqlalchemy import Column, DateTime, Integer, String, Text, Float
from sqlalchemy.sql import func from sqlalchemy.sql import func
from app.db.session import Base from app.db.session import Base
@@ -15,6 +15,8 @@ class CollectionTask(Base):
started_at = Column(DateTime(timezone=True)) started_at = Column(DateTime(timezone=True))
completed_at = Column(DateTime(timezone=True)) completed_at = Column(DateTime(timezone=True))
records_processed = Column(Integer, default=0) records_processed = Column(Integer, default=0)
total_records = Column(Integer, default=0) # Total records to process
progress = Column(Float, default=0.0) # Progress percentage (0-100)
error_message = Column(Text) error_message = Column(Text)
created_at = Column(DateTime(timezone=True), server_default=func.now()) created_at = Column(DateTime(timezone=True), server_default=func.now())

View File

@@ -7,7 +7,7 @@ from pydantic import BaseModel
class Token(BaseModel): class Token(BaseModel):
access_token: str access_token: str
token_type: str = "bearer" token_type: str = "bearer"
expires_in: int expires_in: Optional[int] = None
user: dict user: dict
@@ -19,4 +19,4 @@ class TokenPayload(BaseModel):
class TokenRefresh(BaseModel): class TokenRefresh(BaseModel):
access_token: str access_token: str
expires_in: int expires_in: Optional[int] = None

View File

@@ -0,0 +1,239 @@
"""Cable graph service for finding shortest path between landing points"""
import math
from typing import List, Dict, Any, Optional, Tuple
import networkx as nx
def normalize_longitude(lon: float) -> float:
    """Wrap a longitude into the [-180, 180] degree range.

    Values already inside the range (including exactly +/-180) are returned
    unchanged; out-of-range values are shifted by whole multiples of 360.
    """
    while not -180 <= lon <= 180:
        lon -= math.copysign(360, lon)
    return lon


def haversine_distance(coord1: Tuple[float, float], coord2: Tuple[float, float]) -> float:
    """Great-circle distance in kilometres between two (lon, lat) points.

    Longitudes are normalized first so routes crossing the antimeridian
    are measured correctly.
    """
    earth_radius_km = 6371
    lon_a, lat_a = normalize_longitude(coord1[0]), coord1[1]
    lon_b, lat_b = normalize_longitude(coord2[0]), coord2[1]
    phi_a = math.radians(lat_a)
    phi_b = math.radians(lat_b)
    d_phi = math.radians(lat_b - lat_a)
    d_lambda = math.radians(lon_b - lon_a)
    # Haversine formula: half-chord-length term, then the central angle.
    half_chord = (
        math.sin(d_phi / 2) ** 2
        + math.cos(phi_a) * math.cos(phi_b) * math.sin(d_lambda / 2) ** 2
    )
    central_angle = 2 * math.atan2(math.sqrt(half_chord), math.sqrt(1 - half_chord))
    return earth_radius_km * central_angle
class CableGraph:
    """Graph of submarine cables connecting landing points.

    Nodes are landing-point ids; edges are cables weighted by the distance
    along the cable route, so shortest paths can be found between arbitrary
    (lon, lat) coordinates.
    """

    # Max distance in km between a query coordinate and a landing point for
    # the coordinate to be "snapped" onto that landing point.
    MAX_SNAP_DISTANCE_KM = 500

    def __init__(self, cables: List[Dict], landing_points: List[Dict]):
        """Build the graph.

        Args:
            cables: dicts with "id", "name" and "coordinates" (list of
                [lon, lat] points along the route).
            landing_points: dicts with "id", "name", "lon" and "lat".
        """
        self.graph = nx.Graph()
        self.landing_points = {lp["id"]: lp for lp in landing_points}
        self.point_coords = {lp["id"]: (lp["lon"], lp["lat"]) for lp in landing_points}
        self._build_graph(cables)

    def _build_graph(self, cables: List[Dict]):
        """Add one edge per cable between its nearest start/end landing points."""
        for cable in cables:
            coords = cable.get("coordinates", [])
            if len(coords) < 2:
                continue
            # Sample a few points at each end of the route: the raw endpoint
            # is sometimes offshore, so a neighbouring coordinate may sit
            # closer to the true landing point.
            start_point = self._find_nearest_landing_point_multi(coords[:3])
            end_point = self._find_nearest_landing_point_multi(coords[-3:])
            # `is None`, not truthiness: a landing-point id of 0 is valid.
            if start_point is None or end_point is None or start_point == end_point:
                continue
            distance = self._calculate_cable_distance(coords)
            edge_data = {
                "distance": distance,
                "cable_name": cable.get("name", "Unknown"),
                "cable_id": cable.get("id"),
                "coordinates": coords,
            }
            # Parallel cables between the same two points: keep the shortest.
            if self.graph.has_edge(start_point, end_point):
                if distance < self.graph[start_point][end_point]["distance"]:
                    self.graph[start_point][end_point].update(edge_data)
            else:
                self.graph.add_edge(start_point, end_point, **edge_data)

    def _find_nearest_landing_point_multi(self, coords_subset: List[List[float]]) -> Optional[int]:
        """Return the landing point nearest to any coordinate in *coords_subset*."""
        best_point = None
        best_dist = float("inf")
        for coord in coords_subset:
            point = self._find_nearest_landing_point(coord)
            if point is None:
                continue
            dist = haversine_distance(
                (normalize_longitude(coord[0]), coord[1]), self.point_coords[point]
            )
            if dist < best_dist:
                best_dist = dist
                best_point = point
        return best_point

    def _find_nearest_landing_point(self, coord: List[float]) -> Optional[int]:
        """Nearest landing point to *coord*, or None if none is within range."""
        return self._nearest_point_id(coord, self.point_coords.keys())

    def _find_nearest_connected_landing_point(self, coord: List[float]) -> Optional[int]:
        """Nearest landing point that has at least one cable edge, or None."""
        if not self.graph.nodes():
            return None
        return self._nearest_point_id(coord, self.graph.nodes())

    def _nearest_point_id(self, coord: List[float], candidate_ids) -> Optional[int]:
        """Linear scan shared by the two nearest-point lookups.

        Returns the id of the candidate closest to *coord* (longitude
        normalized for dateline handling), or None when the closest one is
        farther than MAX_SNAP_DISTANCE_KM.
        """
        if not self.point_coords:
            return None
        target = (normalize_longitude(coord[0]), coord[1])
        min_dist = float("inf")
        nearest_id = None
        for lp_id in candidate_ids:
            dist = haversine_distance(target, self.point_coords[lp_id])
            if dist < min_dist:
                min_dist = dist
                nearest_id = lp_id
        return nearest_id if min_dist < self.MAX_SNAP_DISTANCE_KM else None

    def _calculate_cable_distance(self, coordinates: List[List[float]]) -> float:
        """Total length in km of the polyline *coordinates*."""
        return sum(
            haversine_distance(
                (coordinates[i][0], coordinates[i][1]),
                (coordinates[i + 1][0], coordinates[i + 1][1]),
            )
            for i in range(len(coordinates) - 1)
        )

    def find_shortest_path(
        self, start_coords: List[float], end_coords: List[float]
    ) -> Optional[Dict[str, Any]]:
        """Shortest cable path between two coordinates.

        Returns None when either coordinate is too far from the network or
        no route exists; otherwise a dict with start/end landing points,
        total distance, and one segment entry per cable hop.
        """
        start_point = self._find_nearest_connected_landing_point(start_coords)
        end_point = self._find_nearest_connected_landing_point(end_coords)
        # `is None`, not truthiness: a landing-point id of 0 is valid.
        if start_point is None or end_point is None:
            return None
        try:
            # shortest_path raises NetworkXNoPath itself, so no separate
            # has_path() pre-check is needed.
            path = nx.shortest_path(self.graph, start_point, end_point, weight="distance")
        except nx.NetworkXNoPath:
            return None
        total_distance = 0.0
        path_segments = []
        for u, v in zip(path, path[1:]):
            edge_data = self.graph[u][v]
            total_distance += edge_data["distance"]
            path_segments.append(
                {
                    "from": self.landing_points[u],
                    "to": self.landing_points[v],
                    "cable_name": edge_data["cable_name"],
                    "cable_id": edge_data["cable_id"],
                    "distance_km": round(edge_data["distance"], 2),
                    "coordinates": edge_data["coordinates"],
                }
            )
        return {
            "start": {
                "id": start_point,
                "name": self.landing_points[start_point].get("name", "Unknown"),
                "coords": list(self.point_coords[start_point]),
            },
            "end": {
                "id": end_point,
                "name": self.landing_points[end_point].get("name", "Unknown"),
                "coords": list(self.point_coords[end_point]),
            },
            "total_distance_km": round(total_distance, 2),
            "segments": path_segments,
            "segment_count": len(path_segments),
        }
def build_graph_from_data(cables_data: Dict, points_data: Dict) -> CableGraph:
    """Build a CableGraph from cable and landing-point GeoJSON FeatureCollections.

    Args:
        cables_data: FeatureCollection of LineString/MultiLineString cables.
        points_data: FeatureCollection of Point landing points.
    """
    cables = []
    for feature in cables_data.get("features", []):
        props = feature.get("properties", {})
        coords = feature.get("geometry", {}).get("coordinates", [])
        # MultiLineString coordinates nest one level deeper than LineString:
        # a MultiLineString's first element is a line (list of [lon, lat]
        # pairs), while a LineString's first element is a point. Testing only
        # isinstance(coords[0], list) would also be true for a LineString and
        # wrongly collapse it to a single point, so we look one level deeper
        # and take the first line only in the MultiLineString case.
        if coords and coords[0] and isinstance(coords[0][0], list):
            coords = coords[0]
        cables.append(
            {
                "id": props.get("id"),
                "name": props.get("name", props.get("Name", "Unknown")),
                "coordinates": coords,
            }
        )
    points = []
    for feature in points_data.get("features", []):
        geom = feature.get("geometry", {})
        props = feature.get("properties", {})
        coords = geom.get("coordinates", [])
        if coords and len(coords) >= 2:
            points.append(
                {
                    "id": props.get("id"),
                    "name": props.get("name", "Unknown"),
                    "lon": coords[0],
                    "lat": coords[1],
                }
            )
    return CableGraph(cables, points)

View File

@@ -24,6 +24,8 @@ from app.services.collectors.cloudflare import (
CloudflareRadarTrafficCollector, CloudflareRadarTrafficCollector,
CloudflareRadarTopASCollector, CloudflareRadarTopASCollector,
) )
from app.services.collectors.arcgis_cables import ArcGISCableCollector
from app.services.collectors.fao_landing import FAOLandingPointCollector
collector_registry.register(TOP500Collector()) collector_registry.register(TOP500Collector())
collector_registry.register(EpochAIGPUCollector()) collector_registry.register(EpochAIGPUCollector())
@@ -39,3 +41,5 @@ collector_registry.register(TeleGeographyCableSystemCollector())
collector_registry.register(CloudflareRadarDeviceCollector()) collector_registry.register(CloudflareRadarDeviceCollector())
collector_registry.register(CloudflareRadarTrafficCollector()) collector_registry.register(CloudflareRadarTrafficCollector())
collector_registry.register(CloudflareRadarTopASCollector()) collector_registry.register(CloudflareRadarTopASCollector())
collector_registry.register(ArcGISCableCollector())
collector_registry.register(FAOLandingPointCollector())

View File

@@ -0,0 +1,84 @@
"""ArcGIS Submarine Cables Collector
Collects submarine cable data from ArcGIS GeoJSON API.
"""
import json
from typing import Dict, Any, List
from datetime import datetime
import httpx
from app.services.collectors.base import BaseCollector
class ArcGISCableCollector(BaseCollector):
    """Collects submarine cable geometries from the ArcGIS GeoJSON API."""

    name = "arcgis_cables"
    priority = "P1"
    module = "L2"
    frequency_hours = 168
    data_type = "submarine_cable"
    base_url = "https://services.arcgis.com/6DIQcwlPy8knb6sg/arcgis/rest/services/SubmarineCables/FeatureServer/2/query"

    async def fetch(self) -> List[Dict[str, Any]]:
        """Download every cable feature as GeoJSON and parse it into entries."""
        params = {"where": "1=1", "outFields": "*", "returnGeometry": "true", "f": "geojson"}
        async with httpx.AsyncClient(timeout=60.0) as client:
            response = await client.get(self.base_url, params=params)
            response.raise_for_status()
            return self.parse_response(response.json())

    @staticmethod
    def _extract_route(geometry: Dict[str, Any]) -> List[List[List[float]]]:
        """Normalize a LineString/MultiLineString geometry to a list of lines.

        Each line is a list of [lon, lat, ...] points. Points with fewer
        than two values are dropped, as are lines left empty after
        filtering. Unsupported geometry types yield an empty list.
        """
        geom_type = geometry.get("type")
        if geom_type == "MultiLineString":
            lines = geometry.get("coordinates", [])
        elif geom_type == "LineString":
            # Wrap so both geometry types share the same extraction loop.
            lines = [geometry.get("coordinates", [])]
        else:
            return []
        route = []
        for line in lines:
            line_coords = [point for point in line if len(point) >= 2]
            if line_coords:
                route.append(line_coords)
        return route

    def parse_response(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
        """Transform the GeoJSON FeatureCollection into collector entries.

        Malformed features are skipped rather than failing the whole batch.
        """
        result = []
        for feature in data.get("features", []):
            props = feature.get("properties", {})
            route_coordinates = self._extract_route(feature.get("geometry", {}))
            try:
                entry = {
                    "source_id": f"arcgis_cable_{props.get('cable_id', props.get('OBJECTID', ''))}",
                    "name": props.get("Name", "Unknown"),
                    "country": "",
                    "city": "",
                    "latitude": "",
                    "longitude": "",
                    # Length values may arrive formatted with thousands separators.
                    "value": str(props.get("length", "")).replace(",", ""),
                    "unit": "km",
                    "metadata": {
                        "cable_id": props.get("cable_id"),
                        "owners": props.get("owners"),
                        "rfs": props.get("rfs"),
                        "status": "active",
                        "year": props.get("year"),
                        "url": props.get("url"),
                        "color": props.get("color"),
                        "route_coordinates": route_coordinates,
                    },
                    "reference_date": datetime.utcnow().strftime("%Y-%m-%d"),
                }
                result.append(entry)
            except (ValueError, TypeError, KeyError):
                continue
        return result

View File

@@ -17,7 +17,20 @@ class BaseCollector(ABC):
priority: str = "P1" priority: str = "P1"
module: str = "L1" module: str = "L1"
frequency_hours: int = 4 frequency_hours: int = 4
data_type: str = "generic" # Override in subclass: "supercomputer", "model", "dataset", etc. data_type: str = "generic"
def __init__(self):
self._current_task = None
self._db_session = None
self._datasource_id = 1
def update_progress(self, records_processed: int):
"""Update task progress - call this during data processing"""
if self._current_task and self._db_session and self._current_task.total_records > 0:
self._current_task.records_processed = records_processed
self._current_task.progress = (
records_processed / self._current_task.total_records
) * 100
@abstractmethod @abstractmethod
async def fetch(self) -> List[Dict[str, Any]]: async def fetch(self) -> List[Dict[str, Any]]:
@@ -35,13 +48,11 @@ class BaseCollector(ABC):
from app.models.collected_data import CollectedData from app.models.collected_data import CollectedData
start_time = datetime.utcnow() start_time = datetime.utcnow()
datasource_id = getattr(self, "_datasource_id", 1) # Default to 1 for built-in collectors datasource_id = getattr(self, "_datasource_id", 1)
# Check if collector is active
if not collector_registry.is_active(self.name): if not collector_registry.is_active(self.name):
return {"status": "skipped", "reason": "Collector is disabled"} return {"status": "skipped", "reason": "Collector is disabled"}
# Log task start
task = CollectionTask( task = CollectionTask(
datasource_id=datasource_id, datasource_id=datasource_id,
status="running", status="running",
@@ -51,16 +62,21 @@ class BaseCollector(ABC):
await db.commit() await db.commit()
task_id = task.id task_id = task.id
self._current_task = task
self._db_session = db
try: try:
raw_data = await self.fetch() raw_data = await self.fetch()
task.total_records = len(raw_data)
await db.commit()
data = self.transform(raw_data) data = self.transform(raw_data)
# Save data to database
records_count = await self._save_data(db, data) records_count = await self._save_data(db, data)
# Log task success
task.status = "success" task.status = "success"
task.records_processed = records_count task.records_processed = records_count
task.progress = 100.0
task.completed_at = datetime.utcnow() task.completed_at = datetime.utcnow()
await db.commit() await db.commit()
@@ -94,8 +110,7 @@ class BaseCollector(ABC):
collected_at = datetime.utcnow() collected_at = datetime.utcnow()
records_added = 0 records_added = 0
for item in data: for i, item in enumerate(data):
# Create CollectedData entry
record = CollectedData( record = CollectedData(
source=self.name, source=self.name,
source_id=item.get("source_id") or item.get("id"), source_id=item.get("source_id") or item.get("id"),
@@ -125,7 +140,12 @@ class BaseCollector(ABC):
db.add(record) db.add(record)
records_added += 1 records_added += 1
if i % 100 == 0:
self.update_progress(i + 1)
await db.commit() await db.commit()
await db.commit()
self.update_progress(len(data))
return records_added return records_added
async def save(self, db: AsyncSession, data: List[Dict[str, Any]]) -> int: async def save(self, db: AsyncSession, data: List[Dict[str, Any]]) -> int:

View File

@@ -0,0 +1,66 @@
"""FAO Landing Points Collector
Collects landing point data from FAO CSV API.
"""
import csv
from datetime import datetime
from typing import Any, Dict, List

import httpx

from app.services.collectors.base import BaseCollector
class FAOLandingPointCollector(BaseCollector):
    """Collects submarine-cable landing point coordinates from the FAO CSV API."""

    name = "fao_landing_points"
    priority = "P1"
    module = "L2"
    frequency_hours = 168  # weekly — landing points change rarely
    data_type = "landing_point"

    csv_url = "https://data.apps.fao.org/catalog/dataset/1b75ff21-92f2-4b96-9b7b-98e8aa65ad5d/resource/b6071077-d1d4-4e97-aa00-42e902847c87/download/landing-point-geo.csv"

    async def fetch(self) -> List[Dict[str, Any]]:
        """Download the landing-point CSV and return the parsed records.

        Raises:
            httpx.HTTPStatusError: if the download returns a non-2xx status.
        """
        async with httpx.AsyncClient(timeout=60.0) as client:
            response = await client.get(self.csv_url)
            response.raise_for_status()
            return self.parse_csv(response.text)

    def parse_csv(self, csv_text: str) -> List[Dict[str, Any]]:
        """Parse CSV text into normalized record dicts.

        Expected columns: lon, lat, feature_id, name[, is_tbd]. Rows are
        parsed with ``csv.reader`` so quoted name fields containing commas
        stay intact — a naive ``split(",")`` would shear such names across
        columns and corrupt the coordinates. Malformed rows are skipped.
        """
        result: List[Dict[str, Any]] = []
        reader = csv.reader(csv_text.splitlines())
        next(reader, None)  # skip the header row
        for parts in reader:
            # Blank lines parse to [] and short rows lack coordinates.
            if len(parts) < 4:
                continue
            try:
                lon = float(parts[0])
                lat = float(parts[1])
            except ValueError:
                # Non-numeric coordinates — skip the row.
                continue
            feature_id = parts[2]
            name = parts[3]  # csv.reader already removes CSV quoting
            is_tbd = parts[4].strip() == "true" if len(parts) > 4 else False
            result.append(
                {
                    "source_id": f"fao_lp_{feature_id}",
                    "name": name,
                    "country": "",
                    "city": "",
                    "latitude": str(lat),
                    "longitude": str(lon),
                    "value": "",
                    "unit": "",
                    "metadata": {
                        "is_tbd": is_tbd,
                        "original_id": feature_id,
                    },
                    "reference_date": datetime.utcnow().strftime("%Y-%m-%d"),
                }
            )
        return result

View File

@@ -29,6 +29,8 @@ COLLECTOR_TO_ID = {
"telegeography_cables": 9, "telegeography_cables": 9,
"telegeography_landing": 10, "telegeography_landing": 10,
"telegeography_systems": 11, "telegeography_systems": 11,
"arcgis_cables": 15,
"fao_landing_points": 16,
} }

View File

@@ -10,6 +10,9 @@ from app.services.collectors.registry import collector_registry
async def run_collector_task(collector_name: str) -> Dict[str, Any]: async def run_collector_task(collector_name: str) -> Dict[str, Any]:
"""Run a single collector task""" """Run a single collector task"""
from sqlalchemy import select
from app.models.datasource import DataSource
collector = collector_registry.get(collector_name) collector = collector_registry.get(collector_name)
if not collector: if not collector:
return {"status": "failed", "error": f"Collector {collector_name} not found"} return {"status": "failed", "error": f"Collector {collector_name} not found"}
@@ -18,32 +21,15 @@ async def run_collector_task(collector_name: str) -> Dict[str, Any]:
return {"status": "skipped", "reason": "Collector is disabled"} return {"status": "skipped", "reason": "Collector is disabled"}
async with async_session_factory() as db: async with async_session_factory() as db:
from app.models.task import CollectionTask
from app.models.datasource import DataSource
# Find datasource
result = await db.execute( result = await db.execute(
"SELECT id FROM data_sources WHERE collector_class = :class_name", select(DataSource.id).where(DataSource.collector_class == collector_name)
{"class_name": f"{collector.__class__.__name__}"},
) )
datasource = result.fetchone() datasource = result.scalar_one_or_none()
task = CollectionTask( if datasource:
datasource_id=datasource[0] if datasource else 0, collector._datasource_id = datasource
status="running",
started_at=datetime.utcnow(),
)
db.add(task)
await db.commit()
result = await collector.run(db) result = await collector.run(db)
task.status = result["status"]
task.completed_at = datetime.utcnow()
task.records_processed = result.get("records_processed", 0)
task.error_message = result.get("error")
await db.commit()
return result return result

View File

@@ -1,4 +1,4 @@
import { ReactNode } from 'react' import { ReactNode, useState } from 'react'
import { Layout, Menu, Typography, Button } from 'antd' import { Layout, Menu, Typography, Button } from 'antd'
import { import {
DashboardOutlined, DashboardOutlined,
@@ -7,6 +7,7 @@ import {
SettingOutlined, SettingOutlined,
BarChartOutlined, BarChartOutlined,
MenuUnfoldOutlined, MenuUnfoldOutlined,
MenuFoldOutlined,
} from '@ant-design/icons' } from '@ant-design/icons'
import { Link, useLocation } from 'react-router-dom' import { Link, useLocation } from 'react-router-dom'
import { useAuthStore } from '../../stores/auth' import { useAuthStore } from '../../stores/auth'
@@ -21,6 +22,7 @@ interface AppLayoutProps {
function AppLayout({ children }: AppLayoutProps) { function AppLayout({ children }: AppLayoutProps) {
const location = useLocation() const location = useLocation()
const { user, logout } = useAuthStore() const { user, logout } = useAuthStore()
const [collapsed, setCollapsed] = useState(false)
const menuItems = [ const menuItems = [
{ key: '/', icon: <DashboardOutlined />, label: <Link to="/"></Link> }, { key: '/', icon: <DashboardOutlined />, label: <Link to="/"></Link> },
@@ -34,30 +36,18 @@ function AppLayout({ children }: AppLayoutProps) {
<Layout className="dashboard-layout"> <Layout className="dashboard-layout">
<Sider <Sider
width={240} width={240}
collapsedWidth={80}
collapsible collapsible
collapsed={false} collapsed={collapsed}
onCollapse={(collapsed) => { onCollapse={setCollapsed}
const sider = document.querySelector('.dashboard-sider') as HTMLElement
if (sider) {
sider.style.width = collapsed ? '80px' : '240px'
sider.style.minWidth = collapsed ? '80px' : '240px'
sider.style.maxWidth = collapsed ? '80px' : '240px'
}
}}
className="dashboard-sider" className="dashboard-sider"
trigger={null}
breakpoint="lg"
onBreakpoint={(broken) => {
const sider = document.querySelector('.dashboard-sider') as HTMLElement
if (sider) {
sider.style.width = broken ? '80px' : '240px'
sider.style.minWidth = broken ? '80px' : '240px'
sider.style.maxWidth = broken ? '80px' : '240px'
}
}}
> >
<div style={{ height: 64, display: 'flex', alignItems: 'center', justifyContent: 'center' }}> <div style={{ height: 64, display: 'flex', alignItems: 'center', justifyContent: 'center' }}>
{collapsed ? (
<Text strong style={{ color: 'white', fontSize: 20 }}>🌏</Text>
) : (
<Text strong style={{ color: 'white', fontSize: 18 }}></Text> <Text strong style={{ color: 'white', fontSize: 18 }}></Text>
)}
</div> </div>
<Menu <Menu
theme="dark" theme="dark"
@@ -70,17 +60,8 @@ function AppLayout({ children }: AppLayoutProps) {
<Header className="dashboard-header" style={{ display: 'flex', alignItems: 'center', justifyContent: 'space-between', padding: '0 16px' }}> <Header className="dashboard-header" style={{ display: 'flex', alignItems: 'center', justifyContent: 'space-between', padding: '0 16px' }}>
<Button <Button
type="text" type="text"
icon={<MenuUnfoldOutlined />} icon={collapsed ? <MenuUnfoldOutlined /> : <MenuFoldOutlined />}
onClick={() => { onClick={() => setCollapsed(!collapsed)}
const sider = document.querySelector('.ant-layout-sider') as HTMLElement
if (sider) {
const currentWidth = sider.style.width || '240px'
const isCollapsed = currentWidth === '80px'
sider.style.width = isCollapsed ? '240px' : '80px'
sider.style.minWidth = isCollapsed ? '240px' : '80px'
sider.style.maxWidth = isCollapsed ? '240px' : '80px'
}
}}
style={{ fontSize: 16 }} style={{ fontSize: 16 }}
/> />
<Text strong>, {user?.username}</Text> <Text strong>, {user?.username}</Text>

View File

@@ -32,6 +32,10 @@ body {
background: #001529 !important; background: #001529 !important;
} }
.ant-layout-sider-trigger {
display: none !important;
}
.dashboard-header { .dashboard-header {
background: white; background: white;
padding: 0 24px; padding: 0 24px;

View File

@@ -7,7 +7,7 @@ import {
PlayCircleOutlined, PauseCircleOutlined, PlusOutlined, PlayCircleOutlined, PauseCircleOutlined, PlusOutlined,
EditOutlined, DeleteOutlined, ApiOutlined, EditOutlined, DeleteOutlined, ApiOutlined,
CheckCircleOutlined, CloseCircleOutlined, ExperimentOutlined, CheckCircleOutlined, CloseCircleOutlined, ExperimentOutlined,
SyncOutlined SyncOutlined, ClearOutlined
} from '@ant-design/icons' } from '@ant-design/icons'
import axios from 'axios' import axios from 'axios'
import AppLayout from '../../components/AppLayout/AppLayout' import AppLayout from '../../components/AppLayout/AppLayout'
@@ -20,6 +20,12 @@ interface BuiltInDataSource {
frequency: string frequency: string
is_active: boolean is_active: boolean
collector_class: string collector_class: string
last_run: string | null
is_running: boolean
task_id: number | null
progress: number | null
records_processed: number | null
total_records: number | null
} }
interface CustomDataSource { interface CustomDataSource {
@@ -58,6 +64,7 @@ function DataSources() {
const [viewDrawerVisible, setViewDrawerVisible] = useState(false) const [viewDrawerVisible, setViewDrawerVisible] = useState(false)
const [editingConfig, setEditingConfig] = useState<CustomDataSource | null>(null) const [editingConfig, setEditingConfig] = useState<CustomDataSource | null>(null)
const [viewingSource, setViewingSource] = useState<ViewDataSource | null>(null) const [viewingSource, setViewingSource] = useState<ViewDataSource | null>(null)
const [recordCount, setRecordCount] = useState<number>(0)
const [testing, setTesting] = useState(false) const [testing, setTesting] = useState(false)
const [testResult, setTestResult] = useState<any>(null) const [testResult, setTestResult] = useState<any>(null)
const [form] = Form.useForm() const [form] = Form.useForm()
@@ -78,20 +85,87 @@ function DataSources() {
} }
} }
const [taskProgress, setTaskProgress] = useState<Record<number, { progress: number; is_running: boolean }>>({})
useEffect(() => { useEffect(() => {
fetchData() fetchData()
}, []) }, [])
useEffect(() => {
const runningSources = builtInSources.filter(s => s.is_running)
if (runningSources.length === 0) return
const interval = setInterval(async () => {
const progressMap: Record<number, { progress: number; is_running: boolean }> = {}
await Promise.all(
runningSources.map(async (source) => {
try {
const res = await axios.get(`/api/v1/datasources/${source.id}/task-status`)
progressMap[source.id] = {
progress: res.data.progress || 0,
is_running: res.data.is_running
}
} catch {
progressMap[source.id] = { progress: 0, is_running: false }
}
})
)
setTaskProgress(prev => ({ ...prev, ...progressMap }))
}, 2000)
return () => clearInterval(interval)
}, [builtInSources.map(s => s.id).join(',')])
const handleTrigger = async (id: number) => { const handleTrigger = async (id: number) => {
try { try {
await axios.post(`/api/v1/datasources/${id}/trigger`) await axios.post(`/api/v1/datasources/${id}/trigger`)
message.success('任务已触发') message.success('任务已触发')
// Trigger polling immediately
setTaskProgress(prev => ({ ...prev, [id]: { progress: 0, is_running: true } }))
// Also refresh data
fetchData()
// Also fetch the running task status
pollTaskStatus(id)
} catch (error: unknown) { } catch (error: unknown) {
const err = error as { response?: { data?: { detail?: string } } } const err = error as { response?: { data?: { detail?: string } } }
message.error(err.response?.data?.detail || '触发失败') message.error(err.response?.data?.detail || '触发失败')
} }
} }
const pollTaskStatus = async (sourceId: number) => {
const poll = async () => {
try {
const res = await axios.get(`/api/v1/datasources/${sourceId}/task-status`)
const data = res.data
setTaskProgress(prev => ({ ...prev, [sourceId]: {
progress: data.progress || 0,
is_running: data.is_running
} }))
// Keep polling while running
if (data.is_running) {
setTimeout(poll, 2000)
} else {
// Task completed - refresh data and clear this source from progress
setTimeout(() => {
setTaskProgress(prev => {
const newState = { ...prev }
delete newState[sourceId]
return newState
})
}, 1000)
fetchData()
}
} catch {
// Stop polling on error
}
}
poll()
}
const handleToggle = async (id: number, current: boolean) => { const handleToggle = async (id: number, current: boolean) => {
const endpoint = current ? 'disable' : 'enable' const endpoint = current ? 'disable' : 'enable'
try { try {
@@ -104,9 +178,25 @@ function DataSources() {
} }
} }
const handleClearDataFromDrawer = async () => {
if (!viewingSource) return
try {
const res = await axios.delete(`/api/v1/datasources/${viewingSource.id}/data`)
message.success(res.data.message || '数据已删除')
setViewDrawerVisible(false)
fetchData()
} catch (error: unknown) {
const err = error as { response?: { data?: { detail?: string } } }
message.error(err.response?.data?.detail || '删除数据失败')
}
}
const handleViewSource = async (source: BuiltInDataSource) => { const handleViewSource = async (source: BuiltInDataSource) => {
try { try {
const res = await axios.get(`/api/v1/datasources/${source.id}`) const [res, statsRes] = await Promise.all([
axios.get(`/api/v1/datasources/${source.id}`),
axios.get(`/api/v1/datasources/${source.id}/stats`)
])
const data = res.data const data = res.data
setViewingSource({ setViewingSource({
id: data.id, id: data.id,
@@ -122,6 +212,7 @@ function DataSources() {
priority: data.priority, priority: data.priority,
frequency: data.frequency, frequency: data.frequency,
}) })
setRecordCount(statsRes.data.total_records || 0)
setViewDrawerVisible(true) setViewDrawerVisible(true)
} catch (error) { } catch (error) {
message.error('获取数据源信息失败') message.error('获取数据源信息失败')
@@ -224,40 +315,63 @@ function DataSources() {
} }
const builtinColumns = [ const builtinColumns = [
{ title: 'ID', dataIndex: 'id', key: 'id', width: 60 }, { title: 'ID', dataIndex: 'id', key: 'id', width: 60, fixed: 'left' as const },
{ {
title: '名称', title: '名称',
dataIndex: 'name', dataIndex: 'name',
key: 'name', key: 'name',
width: 180,
ellipsis: true,
render: (name: string, record: BuiltInDataSource) => ( render: (name: string, record: BuiltInDataSource) => (
<Button type="link" onClick={() => handleViewSource(record)}> <Button type="link" onClick={() => handleViewSource(record)}>
{name} {name}
</Button> </Button>
), ),
}, },
{ title: '模块', dataIndex: 'module', key: 'module' }, { title: '模块', dataIndex: 'module', key: 'module', width: 80 },
{ {
title: '优先级', title: '优先级',
dataIndex: 'priority', dataIndex: 'priority',
key: 'priority', key: 'priority',
width: 80,
render: (p: string) => <Tag color={p === 'P0' ? 'red' : 'orange'}>{p}</Tag>, render: (p: string) => <Tag color={p === 'P0' ? 'red' : 'orange'}>{p}</Tag>,
}, },
{ title: '频率', dataIndex: 'frequency', key: 'frequency' }, { title: '频率', dataIndex: 'frequency', key: 'frequency', width: 80 },
{
title: '最近采集',
dataIndex: 'last_run',
key: 'last_run',
width: 140,
render: (lastRun: string | null) => lastRun || '-',
},
{ {
title: '状态', title: '状态',
dataIndex: 'is_active', dataIndex: 'is_active',
key: 'is_active', key: 'is_active',
render: (active: boolean) => ( width: 100,
<Tag color={active ? 'green' : 'red'}>{active ? '运行中' : '已暂停'}</Tag> render: (_: unknown, record: BuiltInDataSource) => {
), const progress = taskProgress[record.id]
if (progress?.is_running || record.is_running) {
const pct = progress?.progress ?? record.progress ?? 0
return (
<Tag color="blue">
{Math.round(pct)}%
</Tag>
)
}
return <Tag color={record.is_active ? 'green' : 'red'}>{record.is_active ? '运行中' : '已暂停'}</Tag>
},
}, },
{ {
title: '操作', title: '操作',
key: 'action', key: 'action',
width: 200,
fixed: 'right' as const,
render: (_: unknown, record: BuiltInDataSource) => ( render: (_: unknown, record: BuiltInDataSource) => (
<Space> <Space size="small">
<Button <Button
type="link" type="link"
size="small"
icon={<SyncOutlined />} icon={<SyncOutlined />}
onClick={() => handleTrigger(record.id)} onClick={() => handleTrigger(record.id)}
> >
@@ -265,6 +379,7 @@ function DataSources() {
</Button> </Button>
<Button <Button
type="link" type="link"
size="small"
icon={record.is_active ? <PauseCircleOutlined /> : <PlayCircleOutlined />} icon={record.is_active ? <PauseCircleOutlined /> : <PlayCircleOutlined />}
onClick={() => handleToggle(record.id, record.is_active)} onClick={() => handleToggle(record.id, record.is_active)}
> >
@@ -276,29 +391,33 @@ function DataSources() {
] ]
const customColumns = [ const customColumns = [
{ title: 'ID', dataIndex: 'id', key: 'id', width: 60 }, { title: 'ID', dataIndex: 'id', key: 'id', width: 60, fixed: 'left' as const },
{ title: '名称', dataIndex: 'name', key: 'name' }, { title: '名称', dataIndex: 'name', key: 'name', width: 150, ellipsis: true },
{ title: '类型', dataIndex: 'source_type', key: 'source_type' }, { title: '类型', dataIndex: 'source_type', key: 'source_type', width: 100 },
{ {
title: '状态', title: '状态',
dataIndex: 'is_active', dataIndex: 'is_active',
key: 'is_active', key: 'is_active',
width: 80,
render: (active: boolean) => ( render: (active: boolean) => (
<Tag color={active ? 'green' : 'red'}>{active ? '启用' : '禁用'}</Tag> <Tag color={active ? 'green' : 'red'}>{active ? '启用' : '禁用'}</Tag>
), ),
}, },
{ title: '创建时间', dataIndex: 'created_at', key: 'created_at' }, { title: '创建时间', dataIndex: 'created_at', key: 'created_at', width: 160 },
{ {
title: '操作', title: '操作',
key: 'action', key: 'action',
width: 150,
fixed: 'right' as const,
render: (_: unknown, record: CustomDataSource) => ( render: (_: unknown, record: CustomDataSource) => (
<Space> <Space size="small">
<Tooltip title="编辑"> <Tooltip title="编辑">
<Button type="link" icon={<EditOutlined />} onClick={() => openDrawer(record)} /> <Button type="link" size="small" icon={<EditOutlined />} onClick={() => openDrawer(record)} />
</Tooltip> </Tooltip>
<Tooltip title={record.is_active ? '禁用' : '启用'}> <Tooltip title={record.is_active ? '禁用' : '启用'}>
<Button <Button
type="link" type="link"
size="small"
icon={record.is_active ? <PauseCircleOutlined /> : <PlayCircleOutlined />} icon={record.is_active ? <PauseCircleOutlined /> : <PlayCircleOutlined />}
onClick={() => handleToggleCustom(record.id, record.is_active)} onClick={() => handleToggleCustom(record.id, record.is_active)}
/> />
@@ -308,7 +427,7 @@ function DataSources() {
onConfirm={() => handleDelete(record.id)} onConfirm={() => handleDelete(record.id)}
> >
<Tooltip title="删除"> <Tooltip title="删除">
<Button type="link" danger icon={<DeleteOutlined />} /> <Button type="link" size="small" danger icon={<DeleteOutlined />} />
</Tooltip> </Tooltip>
</Popconfirm> </Popconfirm>
</Space> </Space>
@@ -327,8 +446,9 @@ function DataSources() {
rowKey="id" rowKey="id"
loading={loading} loading={loading}
pagination={false} pagination={false}
scroll={{ x: 'max-content' }} scroll={{ x: 800, y: 'auto' }}
tableLayout="fixed" tableLayout="fixed"
size="small"
/> />
), ),
}, },
@@ -355,8 +475,9 @@ function DataSources() {
rowKey="id" rowKey="id"
loading={loading} loading={loading}
pagination={false} pagination={false}
scroll={{ x: 'max-content' }} scroll={{ x: 600, y: 'auto' }}
tableLayout="fixed" tableLayout="fixed"
size="small"
/> />
)} )}
</> </>
@@ -590,6 +711,17 @@ function DataSources() {
}} }}
footer={ footer={
<div style={{ display: 'flex', justifyContent: 'space-between' }}> <div style={{ display: 'flex', justifyContent: 'space-between' }}>
<Popconfirm
title={`确定删除"${viewingSource?.name}"的所有数据?`}
onConfirm={handleClearDataFromDrawer}
okText="确定"
cancelText="取消"
>
<Button danger icon={<ClearOutlined />}>
</Button>
</Popconfirm>
<Space>
<Button <Button
icon={<ExperimentOutlined />} icon={<ExperimentOutlined />}
loading={testing} loading={testing}
@@ -597,7 +729,6 @@ function DataSources() {
> >
</Button> </Button>
<Space>
<Button onClick={() => setViewDrawerVisible(false)}></Button> <Button onClick={() => setViewDrawerVisible(false)}></Button>
<Button <Button
type="primary" type="primary"
@@ -616,6 +747,10 @@ function DataSources() {
<Input value={viewingSource.name} disabled /> <Input value={viewingSource.name} disabled />
</Form.Item> </Form.Item>
<Form.Item label="数据量">
<Input value={`${recordCount}`} disabled />
</Form.Item>
<Form.Item label="采集器"> <Form.Item label="采集器">
<Input value={viewingSource.collector_class} disabled /> <Input value={viewingSource.collector_class} disabled />
</Form.Item> </Form.Item>

View File

@@ -1,5 +1,6 @@
import { defineConfig } from 'vite' import { defineConfig } from 'vite'
import react from '@vitejs/plugin-react' import react from '@vitejs/plugin-react'
import path from 'path'
export default defineConfig({ export default defineConfig({
plugins: [react()], plugins: [react()],
@@ -18,5 +19,14 @@ export default defineConfig({
secure: false, secure: false,
}, },
}, },
fs: {
allow: ['..'],
},
},
publicDir: 'public',
resolve: {
alias: {
'@': path.resolve(__dirname, './src'),
},
}, },
}) })