first commit

This commit is contained in:
rayd1o
2026-03-05 11:46:58 +08:00
commit e7033775d8
20657 changed files with 1988940 additions and 0 deletions

View File

@@ -0,0 +1,239 @@
"""Dashboard API with caching and optimizations"""
from datetime import datetime, timedelta
from fastapi import APIRouter, Depends
from sqlalchemy import select, func, text
from sqlalchemy.ext.asyncio import AsyncSession
from app.db.session import get_db
from app.models.user import User
from app.models.datasource import DataSource
from app.models.datasource_config import DataSourceConfig
from app.models.alert import Alert, AlertSeverity
from app.models.task import CollectionTask
from app.core.security import get_current_user
from app.core.cache import cache
# Built-in collectors info (mirrored from datasources.py).
# Kept as a compact row table; expanded into the dict shape the API uses below.
# Row layout: (key, id, display name, module, priority, frequency in hours).
_BUILTIN_COLLECTOR_ROWS = (
    ("top500", 1, "TOP500 Supercomputers", "L1", "P0", 4),
    ("epoch_ai_gpu", 2, "Epoch AI GPU Clusters", "L1", "P0", 6),
    ("huggingface_models", 3, "HuggingFace Models", "L2", "P1", 12),
    ("huggingface_datasets", 4, "HuggingFace Datasets", "L2", "P1", 12),
    ("huggingface_spaces", 5, "HuggingFace Spaces", "L2", "P2", 24),
    ("peeringdb_ixp", 6, "PeeringDB IXP", "L2", "P1", 24),
    ("peeringdb_network", 7, "PeeringDB Networks", "L2", "P2", 48),
    ("peeringdb_facility", 8, "PeeringDB Facilities", "L2", "P2", 48),
    ("telegeography_cables", 9, "Submarine Cables", "L2", "P1", 168),
    ("telegeography_landing", 10, "Cable Landing Points", "L2", "P2", 168),
    ("telegeography_systems", 11, "Cable Systems", "L2", "P2", 168),
)

# key -> {"id", "name", "module", "priority", "frequency_hours"}; insertion
# order matches the row table above (relevant for iteration in the endpoints).
COLLECTOR_INFO = {
    key: {
        "id": collector_id,
        "name": display_name,
        "module": module,
        "priority": priority,
        "frequency_hours": frequency_hours,
    }
    for key, collector_id, display_name, module, priority, frequency_hours in _BUILTIN_COLLECTOR_ROWS
}
# FastAPI router hosting the dashboard endpoints defined below;
# mounted by the application elsewhere (prefix not visible here).
router = APIRouter()
@router.get("/stats")
async def get_stats(
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Return dashboard headline statistics.

    Combines the static built-in collector registry (COLLECTOR_INFO) with
    counts read from the database: custom data-source configs, collection
    tasks started today, and currently active alerts by severity.  The
    assembled response is cached for 60 seconds.

    Returns a dict with keys: total_datasources, active_datasources,
    tasks_today, success_rate (percent, 1 decimal), last_updated
    (ISO-8601, naive UTC), and alerts {critical, warning, info}.
    """
    cache_key = "dashboard:stats"
    cached_result = cache.get(cache_key)
    # Explicit None check: a falsy-but-valid cached value must not be
    # mistaken for a cache miss.
    if cached_result is not None:
        return cached_result

    # Midnight (naive UTC) defines the "today" window for task counters.
    today_start = datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0)

    # Built-in collectors are defined in code and always count as active.
    built_in_count = len(COLLECTOR_INFO)
    built_in_active = built_in_count

    # Custom data-source configs live in the database.
    result = await db.execute(select(func.count(DataSourceConfig.id)))
    custom_count = result.scalar() or 0
    result = await db.execute(
        # .is_(True) is the SQLAlchemy idiom for boolean columns (avoids == True).
        select(func.count(DataSourceConfig.id)).where(DataSourceConfig.is_active.is_(True))
    )
    custom_active = result.scalar() or 0

    total_datasources = built_in_count + custom_count
    active_datasources = built_in_active + custom_active

    # Collection tasks started since midnight, plus today's success rate.
    result = await db.execute(
        select(func.count(CollectionTask.id)).where(CollectionTask.started_at >= today_start)
    )
    tasks_today = result.scalar() or 0
    result = await db.execute(
        select(func.count(CollectionTask.id)).where(
            CollectionTask.status == "success",
            CollectionTask.started_at >= today_start,
        )
    )
    success_tasks = result.scalar() or 0
    success_rate = (success_tasks / tasks_today * 100) if tasks_today > 0 else 0

    # One grouped query replaces three per-severity COUNT round trips.
    result = await db.execute(
        select(Alert.severity, func.count(Alert.id))
        .where(Alert.status == "active")
        .group_by(Alert.severity)
    )
    severity_counts = {severity: count for severity, count in result.all()}

    response = {
        "total_datasources": total_datasources,
        "active_datasources": active_datasources,
        "tasks_today": tasks_today,
        "success_rate": round(success_rate, 1),
        "last_updated": datetime.utcnow().isoformat(),
        "alerts": {
            # Severities absent from the grouped result simply count as 0.
            "critical": severity_counts.get(AlertSeverity.CRITICAL, 0),
            "warning": severity_counts.get(AlertSeverity.WARNING, 0),
            "info": severity_counts.get(AlertSeverity.INFO, 0),
        },
    }
    cache.set(cache_key, response, expire_seconds=60)
    return response
@router.get("/summary")
async def get_summary(
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Return per-module datasource counts for the dashboard summary.

    Built-in collectors contribute to their declared module (L1/L2);
    every active custom config is attributed to module "L3".  The
    response is cached for 5 minutes.

    Returns {"modules": {module: {"datasources", "total_records",
    "last_updated"}}, "last_updated": ...} — total_records is always 0
    here since built-ins don't track record totals in dashboard stats.
    """
    cache_key = "dashboard:summary"
    cached_result = cache.get(cache_key)
    # Explicit None check so a falsy cached value is not treated as a miss.
    if cached_result is not None:
        return cached_result

    # Tally built-in collectors per module (only the count is emitted; the
    # previous per-source name lists were never included in the response).
    counts_by_module: dict[str, int] = {}
    for info in COLLECTOR_INFO.values():
        module = info["module"]
        counts_by_module[module] = counts_by_module.get(module, 0) + 1

    # Active custom configs all land in L3.  A single COUNT suffices: the
    # old GROUP BY source_type only fed the unused name lists.
    result = await db.execute(
        select(func.count(DataSourceConfig.id)).where(DataSourceConfig.is_active.is_(True))
    )
    custom_active = result.scalar() or 0
    if custom_active:
        # Only create the L3 entry when at least one active config exists,
        # matching the previous GROUP BY behavior (no rows -> no L3 key).
        counts_by_module["L3"] = counts_by_module.get("L3", 0) + custom_active

    now_iso = datetime.utcnow().isoformat()
    summary = {
        module: {
            "datasources": count,
            "total_records": 0,  # built-in collectors don't track this here
            "last_updated": now_iso,
        }
        for module, count in counts_by_module.items()
    }
    response = {"modules": summary, "last_updated": now_iso}
    cache.set(cache_key, response, expire_seconds=300)
    return response