feat: persist system settings and refine admin layouts

This commit is contained in:
rayd1o
2026-03-25 02:57:58 +08:00
parent 81a0ca5e7a
commit ef0fefdfc7
19 changed files with 2091 additions and 1231 deletions

View File

@@ -1,155 +1,66 @@
from typing import List, Optional
from datetime import datetime
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy import select, func
from typing import Optional
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy import func, select
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.security import get_current_user
from app.db.session import get_db
from app.models.user import User
from app.models.collected_data import CollectedData
from app.models.datasource import DataSource
from app.models.task import CollectionTask
from app.models.collected_data import CollectedData
from app.core.security import get_current_user
from app.services.collectors.registry import collector_registry
from app.models.user import User
from app.services.scheduler import run_collector_now, sync_datasource_job
# Router for the datasource admin endpoints defined below; mounted by the app elsewhere.
router = APIRouter()
# Static registry of collectors (this commit moves the source of truth to the
# DataSource DB table; this registry backed the legacy lookup path).
# One row per collector: (key, numeric id, display name, module layer, priority, frequency in hours).
# Note: ids 12-14 are intentionally absent from the table.
_COLLECTOR_TABLE = (
    ("top500", 1, "TOP500 Supercomputers", "L1", "P0", 4),
    ("epoch_ai_gpu", 2, "Epoch AI GPU Clusters", "L1", "P0", 6),
    ("huggingface_models", 3, "HuggingFace Models", "L2", "P1", 12),
    ("huggingface_datasets", 4, "HuggingFace Datasets", "L2", "P1", 12),
    ("huggingface_spaces", 5, "HuggingFace Spaces", "L2", "P2", 24),
    ("peeringdb_ixp", 6, "PeeringDB IXP", "L2", "P1", 24),
    ("peeringdb_network", 7, "PeeringDB Networks", "L2", "P2", 48),
    ("peeringdb_facility", 8, "PeeringDB Facilities", "L2", "P2", 48),
    ("telegeography_cables", 9, "Submarine Cables", "L2", "P1", 168),
    ("telegeography_landing", 10, "Cable Landing Points", "L2", "P2", 168),
    ("telegeography_systems", 11, "Cable Systems", "L2", "P2", 168),
    ("arcgis_cables", 15, "ArcGIS Submarine Cables", "L2", "P1", 168),
    ("arcgis_landing_points", 16, "ArcGIS Landing Points", "L2", "P1", 168),
    ("arcgis_cable_landing_relation", 17, "ArcGIS Cable-Landing Relations", "L2", "P1", 168),
    ("fao_landing_points", 18, "FAO Landing Points", "L2", "P1", 168),
    ("spacetrack_tle", 19, "Space-Track TLE", "L3", "P2", 24),
    ("celestrak_tle", 20, "CelesTrak TLE", "L3", "P2", 24),
)

# collector key -> metadata dict (same shape and insertion order as the
# original hand-written literal).
COLLECTOR_INFO = {
    key: {
        "id": collector_id,
        "name": display_name,
        "module": module,
        "priority": priority,
        "frequency_hours": frequency_hours,
    }
    for key, collector_id, display_name, module, priority, frequency_hours in _COLLECTOR_TABLE
}

# Reverse lookups: numeric id <-> collector key.
ID_TO_COLLECTOR = {info["id"]: name for name, info in COLLECTOR_INFO.items()}
COLLECTOR_TO_ID = {name: info["id"] for name, info in COLLECTOR_INFO.items()}
def format_frequency_label(minutes: int) -> str:
    """Render a frequency in minutes as a compact label: e.g. '2d', '6h', '45m'.

    Whole days win over whole hours; anything else falls back to minutes.
    """
    days, day_remainder = divmod(minutes, 1440)
    if day_remainder == 0:
        return f"{days}d"
    hours, hour_remainder = divmod(minutes, 60)
    if hour_remainder == 0:
        return f"{hours}h"
    return f"{minutes}m"
# NOTE(review): this span was diff residue — the pre-commit get_collector_name
# and the post-commit get_datasource_record were interleaved without markers.
# Both are reconstructed here; get_collector_name is the legacy registry lookup
# that this commit removes (retained so any remaining callers keep working).
def get_collector_name(source_id: str) -> Optional[str]:
    """Resolve a path parameter to a COLLECTOR_INFO key (legacy lookup).

    Accepts a numeric id (mapped via ID_TO_COLLECTOR) or the collector key
    itself; returns None when nothing matches.
    """
    try:
        numeric_id = int(source_id)
        if numeric_id in ID_TO_COLLECTOR:
            return ID_TO_COLLECTOR[numeric_id]
    except ValueError:
        pass
    if source_id in COLLECTOR_INFO:
        return source_id
    return None


async def get_datasource_record(db: AsyncSession, source_id: str) -> Optional[DataSource]:
    """Resolve a path parameter to a DataSource row.

    Tries a primary-key fetch when source_id is numeric, then falls back to
    matching either DataSource.source or DataSource.collector_class. Returns
    None when no row matches.
    """
    datasource = None
    try:
        datasource = await db.get(DataSource, int(source_id))
    except ValueError:
        # Non-numeric ids fall through to the name-based lookup below.
        pass
    if datasource is not None:
        return datasource
    result = await db.execute(
        select(DataSource).where(
            (DataSource.source == source_id) | (DataSource.collector_class == source_id)
        )
    )
    return result.scalar_one_or_none()
async def get_last_completed_task(db: AsyncSession, datasource_id: int) -> Optional[CollectionTask]:
    """Fetch the most recently completed collection task for a datasource.

    Returns None when the datasource has never finished a task.
    """
    query = (
        select(CollectionTask)
        .where(
            CollectionTask.datasource_id == datasource_id,
            CollectionTask.completed_at.isnot(None),
        )
        .order_by(CollectionTask.completed_at.desc())
        .limit(1)
    )
    rows = await db.execute(query)
    return rows.scalar_one_or_none()
async def get_running_task(db: AsyncSession, datasource_id: int) -> Optional[CollectionTask]:
    """Fetch the newest task still in "running" state for a datasource, if any."""
    query = (
        select(CollectionTask)
        .where(
            CollectionTask.datasource_id == datasource_id,
            CollectionTask.status == "running",
        )
        .order_by(CollectionTask.started_at.desc())
        .limit(1)
    )
    rows = await db.execute(query)
    return rows.scalar_one_or_none()
# NOTE(review): this span is diff residue — the pre-commit (COLLECTOR_INFO-driven)
# and post-commit (DataSource-table-driven) versions of list_datasources are
# interleaved below without +/- markers; the @@ hunk headers are left from the
# diff rendering. The function's filter parameters (module, is_active, priority)
# are declared in lines not visible in this view — presumably Optional query
# params; confirm against the repository.
@router.get("")
@@ -160,48 +71,24 @@ async def list_datasources(
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    # pre-commit: base query plus an accumulated filters list
    query = select(DataSource)
    filters = []
    # post-commit: one query ordered by module then id, filtered in place
    query = select(DataSource).order_by(DataSource.module, DataSource.id)
    if module:
        filters.append(DataSource.module == module)
        query = query.where(DataSource.module == module)
    if is_active is not None:
        filters.append(DataSource.is_active == is_active)
        query = query.where(DataSource.is_active == is_active)
    if priority:
        filters.append(DataSource.priority == priority)
    if filters:
        query = query.where(*filters)
        query = query.where(DataSource.priority == priority)
    result = await db.execute(query)
    datasources = result.scalars().all()
    collector_list = []
    # pre-commit loop: iterated the hard-coded COLLECTOR_INFO registry and
    # built the running-task query inline
    for name, info in COLLECTOR_INFO.items():
        is_active_status = collector_registry.is_active(name)
        running_task_query = (
            select(CollectionTask)
            .where(CollectionTask.datasource_id == info["id"])
            .where(CollectionTask.status == "running")
            .order_by(CollectionTask.started_at.desc())
            .limit(1)
    # post-commit loop: iterates DB rows and delegates to the shared helpers
    for datasource in datasources:
        running_task = await get_running_task(db, datasource.id)
        last_task = await get_last_completed_task(db, datasource.id)
        data_count_result = await db.execute(
            select(func.count(CollectedData.id)).where(CollectedData.source == datasource.source)
        )
        # pre-commit continuation (removed in this commit):
        running_result = await db.execute(running_task_query)
        running_task = running_result.scalar_one_or_none()
        last_run_query = (
            select(CollectionTask)
            .where(CollectionTask.datasource_id == info["id"])
            .where(CollectionTask.completed_at.isnot(None))
            .order_by(CollectionTask.completed_at.desc())
            .limit(1)
        )
        last_run_result = await db.execute(last_run_query)
        last_task = last_run_result.scalar_one_or_none()
        data_count_query = select(func.count(CollectedData.id)).where(CollectedData.source == name)
        data_count_result = await db.execute(data_count_query)
        data_count = data_count_result.scalar() or 0
        last_run = None
@@ -210,13 +97,14 @@ async def list_datasources(
        collector_list.append(
            {
                # pre-commit dict keys (registry-backed):
                "id": info["id"],
                "name": info["name"],
                "module": info["module"],
                "priority": info["priority"],
                "frequency": f"{info['frequency_hours']}h",
                "is_active": is_active_status,
                "collector_class": name,
                # post-commit dict keys (DB-row-backed, adds frequency_minutes):
                "id": datasource.id,
                "name": datasource.name,
                "module": datasource.module,
                "priority": datasource.priority,
                "frequency": format_frequency_label(datasource.frequency_minutes),
                "frequency_minutes": datasource.frequency_minutes,
                "is_active": datasource.is_active,
                "collector_class": datasource.collector_class,
                "last_run": last_run,
                "is_running": running_task is not None,
                "task_id": running_task.id if running_task else None,
@@ -226,15 +114,7 @@ async def list_datasources(
            }
        )
    # pre-commit: post-filtered the assembled list in Python (now done in SQL)
    if module:
        collector_list = [c for c in collector_list if c["module"] == module]
    if priority:
        collector_list = [c for c in collector_list if c["priority"] == priority]
    return {
        "total": len(collector_list),
        "data": collector_list,
    }
    # post-commit return:
    return {"total": len(collector_list), "data": collector_list}
@router.get("/{source_id}")
async def get_datasource(
    # NOTE(review): this parameter line was hidden inside a diff hunk header;
    # reconstructed from the route path and sibling endpoints — confirm.
    source_id: str,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Return one datasource's metadata, looked up by id, source key, or collector class.

    Raises 404 when no DataSource row matches.
    """
    datasource = await get_datasource_record(db, source_id)
    if not datasource:
        raise HTTPException(status_code=404, detail="Data source not found")
    return {
        "id": datasource.id,
        "name": datasource.name,
        "module": datasource.module,
        "priority": datasource.priority,
        "frequency": format_frequency_label(datasource.frequency_minutes),
        "frequency_minutes": datasource.frequency_minutes,
        "collector_class": datasource.collector_class,
        "source": datasource.source,
        "is_active": datasource.is_active,
    }
@@ -263,24 +144,32 @@ async def get_datasource(
# NOTE(review): the route decorator (presumably @router.post("/{source_id}/enable"))
# is not visible in this diff view — confirm it directly precedes this function.
async def enable_datasource(
    source_id: str,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Mark a datasource active, persist the flag, and resync its scheduled job.

    Raises 404 when no DataSource row matches source_id.
    """
    datasource = await get_datasource_record(db, source_id)
    if not datasource:
        raise HTTPException(status_code=404, detail="Data source not found")
    datasource.is_active = True
    await db.commit()
    # Keep the scheduler's job in sync with the persisted flag.
    await sync_datasource_job(datasource.id)
    return {"status": "enabled", "source_id": datasource.id}
@router.post("/{source_id}/disable")
async def disable_datasource(
    source_id: str,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Mark a datasource inactive, persist the flag, and resync its scheduled job.

    Raises 404 when no DataSource row matches source_id.
    """
    datasource = await get_datasource_record(db, source_id)
    if not datasource:
        raise HTTPException(status_code=404, detail="Data source not found")
    datasource.is_active = False
    await db.commit()
    # Keep the scheduler's job in sync with the persisted flag.
    await sync_datasource_job(datasource.id)
    return {"status": "disabled", "source_id": datasource.id}
@router.get("/{source_id}/stats")
async def get_datasource_stats(
    # NOTE(review): this parameter line was hidden inside a diff hunk header;
    # reconstructed from the route path and sibling endpoints — confirm.
    source_id: str,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Return the total number of CollectedData rows for one datasource.

    Raises 404 when no DataSource row matches source_id.
    """
    datasource = await get_datasource_record(db, source_id)
    if not datasource:
        raise HTTPException(status_code=404, detail="Data source not found")
    result = await db.execute(
        select(func.count(CollectedData.id)).where(CollectedData.source == datasource.source)
    )
    total = result.scalar() or 0
    return {
        "source_id": datasource.id,
        "collector_name": datasource.collector_class,
        "name": datasource.name,
        "total_records": total,
    }
@@ -317,30 +199,25 @@ async def get_datasource_stats(
# NOTE(review): the route decorator (presumably @router.post("/{source_id}/trigger"))
# is not visible in this diff view — confirm it directly precedes this function.
async def trigger_datasource(
    source_id: str,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Trigger an immediate collection run for an active datasource.

    Raises 404 when the datasource is unknown, 400 when it is disabled, and
    500 when the scheduler refuses to start the run. The previous local
    `from app.services.scheduler import run_collector_now` is redundant:
    the module imports it at the top of the file.
    """
    datasource = await get_datasource_record(db, source_id)
    if not datasource:
        raise HTTPException(status_code=404, detail="Data source not found")
    if not datasource.is_active:
        raise HTTPException(status_code=400, detail="Data source is disabled")
    success = run_collector_now(datasource.source)
    if not success:
        raise HTTPException(status_code=500, detail=f"Failed to trigger collector '{datasource.source}'")
    return {
        "status": "triggered",
        "source_id": datasource.id,
        "collector_name": datasource.source,
        "message": f"Collector '{datasource.source}' has been triggered",
    }
@router.delete("/{source_id}/data")
async def clear_datasource_data(
    # NOTE(review): this parameter line was hidden inside a diff hunk header;
    # reconstructed from the route path and sibling endpoints — confirm.
    source_id: str,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Delete every CollectedData row belonging to one datasource.

    Counts matching rows first so the response can report how many were
    removed; short-circuits with deleted_count 0 when there is nothing to do.
    Raises 404 when no DataSource row matches source_id.
    """
    datasource = await get_datasource_record(db, source_id)
    if not datasource:
        raise HTTPException(status_code=404, detail="Data source not found")
    result = await db.execute(
        select(func.count(CollectedData.id)).where(CollectedData.source == datasource.source)
    )
    count = result.scalar() or 0
    if count == 0:
        return {"status": "success", "message": "No data to clear", "deleted_count": 0}
    delete_query = CollectedData.__table__.delete().where(CollectedData.source == datasource.source)
    await db.execute(delete_query)
    await db.commit()
    return {
        "status": "success",
        "message": f"Cleared {count} records for data source '{datasource.name}'",
        "deleted_count": count,
    }
# NOTE(review): diff residue — pre- and post-commit versions of get_task_status
# are interleaved below; the def line sits inside the surviving hunk header, and
# the opening of the final return dict (its "is_running"/"task_id"/"progress"
# entries, presumably) is in a skipped hunk and not visible here — confirm
# against the repository before relying on this span.
@@ -391,22 +254,11 @@ async def get_task_status(
    source_id: str,
    db: AsyncSession = Depends(get_db),
):
    # pre-commit lookup (COLLECTOR_INFO registry):
    collector_name = get_collector_name(source_id)
    if not collector_name:
    # post-commit lookup (DataSource table):
    datasource = await get_datasource_record(db, source_id)
    if not datasource:
        raise HTTPException(status_code=404, detail="Data source not found")
    # pre-commit inline running-task query (replaced by get_running_task helper):
    info = COLLECTOR_INFO[collector_name]
    running_task_query = (
        select(CollectionTask)
        .where(CollectionTask.datasource_id == info["id"])
        .where(CollectionTask.status == "running")
        .order_by(CollectionTask.started_at.desc())
        .limit(1)
    )
    running_result = await db.execute(running_task_query)
    running_task = running_result.scalar_one_or_none()
    # post-commit: shared helper
    running_task = await get_running_task(db, datasource.id)
    if not running_task:
        return {"is_running": False, "task_id": None, "progress": None}
@@ -417,4 +269,4 @@ async def get_task_status(
        # tail of the final return dict; its opening lines are not visible here
        "records_processed": running_task.records_processed,
        "total_records": running_task.total_records,
        "status": running_task.status,
    }
    }