feat: add bgp observability and admin ui improvements
@@ -11,6 +11,7 @@ from app.api.v1 import (
     settings,
     collected_data,
     visualization,
+    bgp,
 )

 api_router = APIRouter()
@@ -27,3 +28,4 @@ api_router.include_router(dashboard.router, prefix="/dashboard", tags=["dashboard"])
 api_router.include_router(alerts.router, prefix="/alerts", tags=["alerts"])
 api_router.include_router(settings.router, prefix="/settings", tags=["settings"])
 api_router.include_router(visualization.router, prefix="/visualization", tags=["visualization"])
+api_router.include_router(bgp.router, prefix="/bgp", tags=["bgp"])
@@ -1,4 +1,4 @@
-from datetime import datetime
+from datetime import UTC, datetime
 from typing import Optional

 from fastapi import APIRouter, Depends
@@ -68,7 +68,7 @@ async def acknowledge_alert(

     alert.status = AlertStatus.ACKNOWLEDGED
     alert.acknowledged_by = current_user.id
-    alert.acknowledged_at = datetime.utcnow()
+    alert.acknowledged_at = datetime.now(UTC)
     await db.commit()

     return {"message": "Alert acknowledged", "alert": alert.to_dict()}
@@ -89,7 +89,7 @@ async def resolve_alert(

     alert.status = AlertStatus.RESOLVED
     alert.resolved_by = current_user.id
-    alert.resolved_at = datetime.utcnow()
+    alert.resolved_at = datetime.now(UTC)
     alert.resolution_notes = resolution
     await db.commit()

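Note on the datetime change that recurs throughout this commit: datetime.utcnow() returns a naive datetime (tzinfo is None) and is deprecated as of Python 3.12, while datetime.now(UTC) returns an aware one (the UTC constant is importable from datetime since 3.11). A quick illustration:

    from datetime import UTC, datetime

    naive = datetime.utcnow()   # tzinfo is None; deprecated since Python 3.12
    aware = datetime.now(UTC)   # tzinfo is UTC; serializes with an explicit +00:00 offset

    print(naive.tzinfo)         # None
    print(aware.isoformat())    # e.g. 2026-01-01T00:00:00+00:00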
182  backend/app/api/v1/bgp.py  Normal file
@@ -0,0 +1,182 @@
+from datetime import datetime
+from typing import Optional
+
+from fastapi import APIRouter, Depends, HTTPException, Query
+from sqlalchemy import func, select
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from app.core.security import get_current_user
+from app.db.session import get_db
+from app.models.bgp_anomaly import BGPAnomaly
+from app.models.collected_data import CollectedData
+from app.models.user import User
+
+router = APIRouter()
+
+BGP_SOURCES = ("ris_live_bgp", "bgpstream_bgp")
+
+
+def _parse_dt(value: Optional[str]) -> Optional[datetime]:
+    if not value:
+        return None
+    return datetime.fromisoformat(value.replace("Z", "+00:00"))
+
+
+def _matches_time(value: Optional[datetime], time_from: Optional[datetime], time_to: Optional[datetime]) -> bool:
+    if value is None:
+        return False
+    if time_from and value < time_from:
+        return False
+    if time_to and value > time_to:
+        return False
+    return True
+
+
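A note on _parse_dt: datetime.fromisoformat() only accepts a trailing "Z" from Python 3.11 onward, so the replace("Z", "+00:00") shim keeps the endpoint compatible with older interpreters. Behavior sketch (illustrative values):

    from datetime import timezone

    _parse_dt("2025-05-01T00:00:00Z")   # datetime(2025, 5, 1, tzinfo=timezone.utc)
    _parse_dt(None)                     # None
    _parse_dt("")                       # None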
@router.get("/events")
|
||||
async def list_bgp_events(
|
||||
prefix: Optional[str] = Query(None),
|
||||
origin_asn: Optional[int] = Query(None),
|
||||
peer_asn: Optional[int] = Query(None),
|
||||
collector: Optional[str] = Query(None),
|
||||
event_type: Optional[str] = Query(None),
|
||||
source: Optional[str] = Query(None),
|
||||
time_from: Optional[str] = Query(None),
|
||||
time_to: Optional[str] = Query(None),
|
||||
page: int = Query(1, ge=1),
|
||||
page_size: int = Query(50, ge=1, le=200),
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
stmt = (
|
||||
select(CollectedData)
|
||||
.where(CollectedData.source.in_(BGP_SOURCES))
|
||||
.order_by(CollectedData.reference_date.desc().nullslast(), CollectedData.id.desc())
|
||||
)
|
||||
if source:
|
||||
stmt = stmt.where(CollectedData.source == source)
|
||||
|
||||
result = await db.execute(stmt)
|
||||
records = result.scalars().all()
|
||||
dt_from = _parse_dt(time_from)
|
||||
dt_to = _parse_dt(time_to)
|
||||
|
||||
filtered = []
|
||||
for record in records:
|
||||
metadata = record.extra_data or {}
|
||||
if prefix and metadata.get("prefix") != prefix:
|
||||
continue
|
||||
if origin_asn is not None and metadata.get("origin_asn") != origin_asn:
|
||||
continue
|
||||
if peer_asn is not None and metadata.get("peer_asn") != peer_asn:
|
||||
continue
|
||||
if collector and metadata.get("collector") != collector:
|
||||
continue
|
||||
if event_type and metadata.get("event_type") != event_type:
|
||||
continue
|
||||
if (dt_from or dt_to) and not _matches_time(record.reference_date, dt_from, dt_to):
|
||||
continue
|
||||
filtered.append(record)
|
||||
|
||||
offset = (page - 1) * page_size
|
||||
return {
|
||||
"total": len(filtered),
|
||||
"page": page,
|
||||
"page_size": page_size,
|
||||
"data": [record.to_dict() for record in filtered[offset : offset + page_size]],
|
||||
}
|
||||
|
||||
|
||||
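A usage sketch for the events listing (base URL, token, and query values are illustrative; the /bgp prefix comes from the router registration above, and the /api/v1 mount point is an assumption):

    import httpx

    token = "..."  # JWT from the existing auth flow
    resp = httpx.get(
        "http://localhost:8000/api/v1/bgp/events",
        params={"prefix": "192.0.2.0/24", "time_from": "2025-05-01T00:00:00Z", "page_size": 10},
        headers={"Authorization": f"Bearer {token}"},
    )
    print(resp.json()["total"])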
@router.get("/events/{event_id}")
|
||||
async def get_bgp_event(
|
||||
event_id: int,
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
record = await db.get(CollectedData, event_id)
|
||||
if not record or record.source not in BGP_SOURCES:
|
||||
raise HTTPException(status_code=404, detail="BGP event not found")
|
||||
return record.to_dict()
|
||||
|
||||
|
||||
@router.get("/anomalies")
|
||||
async def list_bgp_anomalies(
|
||||
severity: Optional[str] = Query(None),
|
||||
anomaly_type: Optional[str] = Query(None),
|
||||
status: Optional[str] = Query(None),
|
||||
prefix: Optional[str] = Query(None),
|
||||
origin_asn: Optional[int] = Query(None),
|
||||
time_from: Optional[str] = Query(None),
|
||||
time_to: Optional[str] = Query(None),
|
||||
page: int = Query(1, ge=1),
|
||||
page_size: int = Query(50, ge=1, le=200),
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
stmt = select(BGPAnomaly).order_by(BGPAnomaly.created_at.desc(), BGPAnomaly.id.desc())
|
||||
if severity:
|
||||
stmt = stmt.where(BGPAnomaly.severity == severity)
|
||||
if anomaly_type:
|
||||
stmt = stmt.where(BGPAnomaly.anomaly_type == anomaly_type)
|
||||
if status:
|
||||
stmt = stmt.where(BGPAnomaly.status == status)
|
||||
if prefix:
|
||||
stmt = stmt.where(BGPAnomaly.prefix == prefix)
|
||||
if origin_asn is not None:
|
||||
stmt = stmt.where(BGPAnomaly.origin_asn == origin_asn)
|
||||
|
||||
result = await db.execute(stmt)
|
||||
records = result.scalars().all()
|
||||
dt_from = _parse_dt(time_from)
|
||||
dt_to = _parse_dt(time_to)
|
||||
if dt_from or dt_to:
|
||||
records = [record for record in records if _matches_time(record.created_at, dt_from, dt_to)]
|
||||
|
||||
offset = (page - 1) * page_size
|
||||
return {
|
||||
"total": len(records),
|
||||
"page": page,
|
||||
"page_size": page_size,
|
||||
"data": [record.to_dict() for record in records[offset : offset + page_size]],
|
||||
}
|
||||
|
||||
|
||||
@router.get("/anomalies/summary")
|
||||
async def get_bgp_anomaly_summary(
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
total_result = await db.execute(select(func.count(BGPAnomaly.id)))
|
||||
type_result = await db.execute(
|
||||
select(BGPAnomaly.anomaly_type, func.count(BGPAnomaly.id))
|
||||
.group_by(BGPAnomaly.anomaly_type)
|
||||
.order_by(func.count(BGPAnomaly.id).desc())
|
||||
)
|
||||
severity_result = await db.execute(
|
||||
select(BGPAnomaly.severity, func.count(BGPAnomaly.id))
|
||||
.group_by(BGPAnomaly.severity)
|
||||
.order_by(func.count(BGPAnomaly.id).desc())
|
||||
)
|
||||
status_result = await db.execute(
|
||||
select(BGPAnomaly.status, func.count(BGPAnomaly.id))
|
||||
.group_by(BGPAnomaly.status)
|
||||
.order_by(func.count(BGPAnomaly.id).desc())
|
||||
)
|
||||
|
||||
return {
|
||||
"total": total_result.scalar() or 0,
|
||||
"by_type": {row[0]: row[1] for row in type_result.fetchall()},
|
||||
"by_severity": {row[0]: row[1] for row in severity_result.fetchall()},
|
||||
"by_status": {row[0]: row[1] for row in status_result.fetchall()},
|
||||
}
|
||||
|
||||
|
||||
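The summary endpoint returns grouped counts keyed by the raw column values; an illustrative response (all values invented):

    {
        "total": 42,
        "by_type": {"origin_change": 25, "route_leak": 17},
        "by_severity": {"high": 12, "medium": 30},
        "by_status": {"active": 35, "resolved": 7},
    }

Note the registration order: /anomalies/summary is declared before /anomalies/{anomaly_id} below. FastAPI matches routes in declaration order, so reversing them would send GET /bgp/anomalies/summary into the int path parameter and return a 422.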
@router.get("/anomalies/{anomaly_id}")
|
||||
async def get_bgp_anomaly(
|
||||
anomaly_id: int,
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
record = await db.get(BGPAnomaly, anomaly_id)
|
||||
if not record:
|
||||
raise HTTPException(status_code=404, detail="BGP anomaly not found")
|
||||
return record.to_dict()
|
||||
@@ -9,10 +9,12 @@ import io

 from app.core.collected_data_fields import get_metadata_field
 from app.core.countries import COUNTRY_OPTIONS, get_country_search_variants, normalize_country
+from app.core.time import to_iso8601_utc
 from app.db.session import get_db
 from app.models.user import User
 from app.core.security import get_current_user
 from app.models.collected_data import CollectedData
+from app.models.datasource import DataSource

 router = APIRouter()
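app/core/time.py itself is not part of this diff; a minimal sketch of what to_iso8601_utc presumably does, assuming naive timestamps in the database are stored as UTC (the behavior here is a reconstruction, not the actual implementation):

    from datetime import UTC, datetime
    from typing import Optional

    def to_iso8601_utc(value: Optional[datetime]) -> Optional[str]:
        if value is None:
            return None
        if value.tzinfo is None:
            value = value.replace(tzinfo=UTC)  # assumption: naive values are UTC
        return value.astimezone(UTC).isoformat()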
@@ -100,11 +102,13 @@ def build_search_rank_sql(search: Optional[str]) -> str:
     """


-def serialize_collected_row(row) -> dict:
+def serialize_collected_row(row, source_name_map: dict[str, str] | None = None) -> dict:
     metadata = row[7]
+    source = row[1]
     return {
         "id": row[0],
-        "source": row[1],
+        "source": source,
+        "source_name": source_name_map.get(source, source) if source_name_map else source,
         "source_id": row[2],
         "data_type": row[3],
         "name": row[4],
@@ -121,12 +125,17 @@ def serialize_collected_row(row) -> dict:
         "rmax": get_metadata_field(metadata, "rmax"),
         "rpeak": get_metadata_field(metadata, "rpeak"),
         "power": get_metadata_field(metadata, "power"),
-        "collected_at": row[8].isoformat() if row[8] else None,
-        "reference_date": row[9].isoformat() if row[9] else None,
+        "collected_at": to_iso8601_utc(row[8]),
+        "reference_date": to_iso8601_utc(row[9]),
         "is_valid": row[10],
     }


+async def get_source_name_map(db: AsyncSession) -> dict[str, str]:
+    result = await db.execute(select(DataSource.source, DataSource.name))
+    return {row[0]: row[1] for row in result.fetchall()}
+
+
 @router.get("")
 async def list_collected_data(
     mode: str = Query("current", description="Query mode: current/history"),
@@ -188,10 +197,11 @@ async def list_collected_data(

     result = await db.execute(query, params)
     rows = result.fetchall()
+    source_name_map = await get_source_name_map(db)

     data = []
     for row in rows:
-        data.append(serialize_collected_row(row[:11]))
+        data.append(serialize_collected_row(row[:11], source_name_map))

     return {
         "total": total,
@@ -221,6 +231,7 @@ async def get_data_summary(
         """)
     )
     rows = result.fetchall()
+    source_name_map = await get_source_name_map(db)

     by_source = {}
     total = 0
@@ -229,9 +240,10 @@ async def get_data_summary(
         data_type = row[1]
         count = row[2]

-        if source not in by_source:
-            by_source[source] = {}
-        by_source[source][data_type] = count
+        source_key = source_name_map.get(source, source)
+        if source_key not in by_source:
+            by_source[source_key] = {}
+        by_source[source_key][data_type] = count
         total += count

     # Total by source
@@ -249,7 +261,14 @@ async def get_data_summary(
     return {
         "total_records": total,
         "by_source": by_source,
-        "source_totals": [{"source": row[0], "count": row[1]} for row in source_rows],
+        "source_totals": [
+            {
+                "source": row[0],
+                "source_name": source_name_map.get(row[0], row[0]),
+                "count": row[1],
+            }
+            for row in source_rows
+        ],
     }

@@ -269,9 +288,13 @@ async def get_data_sources(
         """)
     )
     rows = result.fetchall()
+    source_name_map = await get_source_name_map(db)

     return {
-        "sources": [row[0] for row in rows],
+        "sources": [
+            {"source": row[0], "source_name": source_name_map.get(row[0], row[0])}
+            for row in rows
+        ],
     }

@@ -334,7 +357,8 @@ async def get_collected_data(
             detail="Data not found",
         )

-    return serialize_collected_row(row)
+    source_name_map = await get_source_name_map(db)
+    return serialize_collected_row(row, source_name_map)


 def build_where_clause(
@@ -482,8 +506,8 @@ async def export_csv(
                 get_metadata_field(row[7], "value"),
                 get_metadata_field(row[7], "unit"),
                 json.dumps(row[7]) if row[7] else "",
-                row[8].isoformat() if row[8] else "",
-                row[9].isoformat() if row[9] else "",
+                to_iso8601_utc(row[8]) or "",
+                to_iso8601_utc(row[9]) or "",
                 row[10],
             ]
         )

@@ -1,6 +1,6 @@
 """Dashboard API with caching and optimizations"""

-from datetime import datetime, timedelta
+from datetime import UTC, datetime, timedelta
 from fastapi import APIRouter, Depends
 from sqlalchemy import select, func, text
 from sqlalchemy.ext.asyncio import AsyncSession
@@ -13,6 +13,7 @@ from app.models.alert import Alert, AlertSeverity
 from app.models.task import CollectionTask
 from app.core.security import get_current_user
 from app.core.cache import cache
+from app.core.time import to_iso8601_utc


 # Built-in collectors info (mirrored from datasources.py)
@@ -111,7 +112,7 @@ async def get_stats(
     if cached_result:
         return cached_result

-    today_start = datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0)
+    today_start = datetime.now(UTC).replace(hour=0, minute=0, second=0, microsecond=0)

     # Count built-in collectors
     built_in_count = len(COLLECTOR_INFO)
@@ -175,7 +176,7 @@ async def get_stats(
         "active_datasources": active_datasources,
         "tasks_today": tasks_today,
         "success_rate": round(success_rate, 1),
-        "last_updated": datetime.utcnow().isoformat(),
+        "last_updated": to_iso8601_utc(datetime.now(UTC)),
         "alerts": {
             "critical": critical_alerts,
             "warning": warning_alerts,
@@ -230,10 +231,10 @@ async def get_summary(
         summary[module] = {
             "datasources": data["datasources"],
             "total_records": 0,  # Built-ins don't track this in dashboard stats
-            "last_updated": datetime.utcnow().isoformat(),
+            "last_updated": to_iso8601_utc(datetime.now(UTC)),
         }

-    response = {"modules": summary, "last_updated": datetime.utcnow().isoformat()}
+    response = {"modules": summary, "last_updated": to_iso8601_utc(datetime.now(UTC))}

     cache.set(cache_key, response, expire_seconds=300)

@@ -14,6 +14,7 @@ from app.models.user import User
 from app.models.datasource_config import DataSourceConfig
 from app.core.security import get_current_user
 from app.core.cache import cache
+from app.core.time import to_iso8601_utc

 router = APIRouter()

@@ -123,8 +124,8 @@ async def list_configs(
                 "headers": c.headers,
                 "config": c.config,
                 "is_active": c.is_active,
-                "created_at": c.created_at.isoformat() if c.created_at else None,
-                "updated_at": c.updated_at.isoformat() if c.updated_at else None,
+                "created_at": to_iso8601_utc(c.created_at),
+                "updated_at": to_iso8601_utc(c.updated_at),
             }
             for c in configs
         ],
@@ -155,8 +156,8 @@ async def get_config(
         "headers": config.headers,
         "config": config.config,
         "is_active": config.is_active,
-        "created_at": config.created_at.isoformat() if config.created_at else None,
-        "updated_at": config.updated_at.isoformat() if config.updated_at else None,
+        "created_at": to_iso8601_utc(config.created_at),
+        "updated_at": to_iso8601_utc(config.updated_at),
     }

@@ -1,9 +1,12 @@
 import asyncio
 from datetime import datetime, timedelta, timezone
 from typing import Optional

-from fastapi import APIRouter, Depends, HTTPException
+from fastapi import APIRouter, Depends, HTTPException, Query
 from sqlalchemy import func, select
 from sqlalchemy.ext.asyncio import AsyncSession

+from app.core.time import to_iso8601_utc
 from app.core.security import get_current_user
 from app.core.data_sources import get_data_sources_config
 from app.db.session import get_db
@@ -24,6 +27,12 @@ def format_frequency_label(minutes: int) -> str:
     return f"{minutes}m"


+def is_due_for_collection(datasource: DataSource, now: datetime) -> bool:
+    if datasource.last_run_at is None:
+        return True
+    return datasource.last_run_at + timedelta(minutes=datasource.frequency_minutes) <= now
+
+
 async def get_datasource_record(db: AsyncSession, source_id: str) -> Optional[DataSource]:
     datasource = None
     try:
@@ -47,6 +56,7 @@ async def get_last_completed_task(db: AsyncSession, datasource_id: int) -> Optional[CollectionTask]:
         select(CollectionTask)
         .where(CollectionTask.datasource_id == datasource_id)
         .where(CollectionTask.completed_at.isnot(None))
+        .where(CollectionTask.status.in_(("success", "failed", "cancelled")))
         .order_by(CollectionTask.completed_at.desc())
         .limit(1)
     )
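is_due_for_collection gates the bulk trigger added below; a quick check of its semantics with a stand-in object (the function only touches last_run_at and frequency_minutes, so a dataclass suffices for illustration):

    from dataclasses import dataclass
    from datetime import datetime, timedelta, timezone
    from typing import Optional

    @dataclass
    class FakeSource:  # illustrative stand-in for the DataSource model
        last_run_at: Optional[datetime]
        frequency_minutes: int

    now = datetime.now(timezone.utc)
    assert is_due_for_collection(FakeSource(None, 15), now)                            # never ran
    assert is_due_for_collection(FakeSource(now - timedelta(minutes=30), 15), now)     # overdue
    assert not is_due_for_collection(FakeSource(now - timedelta(minutes=5), 15), now)  # too soon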
@@ -94,9 +104,9 @@ async def list_datasources(
         )
         data_count = data_count_result.scalar() or 0

-        last_run = None
-        if last_task and last_task.completed_at and data_count > 0:
-            last_run = last_task.completed_at.strftime("%Y-%m-%d %H:%M")
+        last_run_at = datasource.last_run_at or (last_task.completed_at if last_task else None)
+        last_run = to_iso8601_utc(last_run_at)
+        last_status = datasource.last_status or (last_task.status if last_task else None)

         collector_list.append(
             {
@@ -110,6 +120,10 @@ async def list_datasources(
                 "collector_class": datasource.collector_class,
                 "endpoint": endpoint,
                 "last_run": last_run,
+                "last_run_at": to_iso8601_utc(last_run_at),
+                "last_status": last_status,
+                "last_records_processed": last_task.records_processed if last_task else None,
+                "data_count": data_count,
                 "is_running": running_task is not None,
                 "task_id": running_task.id if running_task else None,
                 "progress": running_task.progress if running_task else None,
@@ -122,6 +136,105 @@ async def list_datasources(
     return {"total": len(collector_list), "data": collector_list}


+@router.post("/trigger-all")
+async def trigger_all_datasources(
+    force: bool = Query(False),
+    current_user: User = Depends(get_current_user),
+    db: AsyncSession = Depends(get_db),
+):
+    result = await db.execute(
+        select(DataSource)
+        .where(DataSource.is_active == True)
+        .order_by(DataSource.module, DataSource.id)
+    )
+    datasources = result.scalars().all()
+
+    if not datasources:
+        return {
+            "status": "noop",
+            "message": "No active data sources to trigger",
+            "triggered": [],
+            "skipped": [],
+            "failed": [],
+        }
+
+    previous_task_ids: dict[int, Optional[int]] = {}
+    triggered_sources: list[dict] = []
+    skipped_sources: list[dict] = []
+    failed_sources: list[dict] = []
+    now = datetime.now(timezone.utc)
+
+    for datasource in datasources:
+        running_task = await get_running_task(db, datasource.id)
+        if running_task is not None:
+            skipped_sources.append(
+                {
+                    "id": datasource.id,
+                    "source": datasource.source,
+                    "name": datasource.name,
+                    "reason": "already_running",
+                    "task_id": running_task.id,
+                }
+            )
+            continue
+
+        if not force and not is_due_for_collection(datasource, now):
+            skipped_sources.append(
+                {
+                    "id": datasource.id,
+                    "source": datasource.source,
+                    "name": datasource.name,
+                    "reason": "within_frequency_window",
+                    "last_run_at": to_iso8601_utc(datasource.last_run_at),
+                    "next_run_at": to_iso8601_utc(
+                        datasource.last_run_at + timedelta(minutes=datasource.frequency_minutes)
+                    ),
+                }
+            )
+            continue
+
+        previous_task_ids[datasource.id] = await get_latest_task_id_for_datasource(datasource.id)
+        success = run_collector_now(datasource.source)
+        if not success:
+            failed_sources.append(
+                {
+                    "id": datasource.id,
+                    "source": datasource.source,
+                    "name": datasource.name,
+                    "reason": "trigger_failed",
+                }
+            )
+            continue
+
+        triggered_sources.append(
+            {
+                "id": datasource.id,
+                "source": datasource.source,
+                "name": datasource.name,
+                "task_id": None,
+            }
+        )
+
+    for _ in range(20):
+        await asyncio.sleep(0.1)
+        pending = [item for item in triggered_sources if item["task_id"] is None]
+        if not pending:
+            break
+        for item in pending:
+            task_id = await get_latest_task_id_for_datasource(item["id"])
+            if task_id is not None and task_id != previous_task_ids.get(item["id"]):
+                item["task_id"] = task_id
+
+    return {
+        "status": "triggered" if triggered_sources else "partial",
+        "message": f"Triggered {len(triggered_sources)} data sources",
+        "force": force,
+        "triggered": triggered_sources,
+        "skipped": skipped_sources,
+        "failed": failed_sources,
+    }
+
+
 @router.get("/{source_id}")
 async def get_datasource(
     source_id: str,
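A bulk-trigger usage sketch (base URL and router prefix are assumptions; force=true bypasses the per-source frequency window but not the already-running check):

    import httpx

    token = "..."  # JWT from the existing auth flow
    resp = httpx.post(
        "http://localhost:8000/api/v1/datasources/trigger-all",
        params={"force": "true"},
        headers={"Authorization": f"Bearer {token}"},
    )
    body = resp.json()
    print(body["status"], len(body["triggered"]), "triggered,", len(body["skipped"]), "skipped")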
@@ -217,15 +330,19 @@ async def trigger_datasource(
     if not datasource.is_active:
         raise HTTPException(status_code=400, detail="Data source is disabled")

+    previous_task_id = await get_latest_task_id_for_datasource(datasource.id)
     success = run_collector_now(datasource.source)
     if not success:
         raise HTTPException(status_code=500, detail=f"Failed to trigger collector '{datasource.source}'")

     task_id = None
-    for _ in range(10):
+    for _ in range(20):
         await asyncio.sleep(0.1)
         task_id = await get_latest_task_id_for_datasource(datasource.id)
-        if task_id is not None:
+        if task_id is not None and task_id != previous_task_id:
             break
+    if task_id == previous_task_id:
+        task_id = None

     return {
         "status": "triggered",

@@ -1,4 +1,4 @@
-from datetime import datetime
+from datetime import UTC, datetime
 from typing import Optional

 from fastapi import APIRouter, Depends, HTTPException
@@ -7,6 +7,7 @@ from sqlalchemy import select
 from sqlalchemy.ext.asyncio import AsyncSession

 from app.core.security import get_current_user
+from app.core.time import to_iso8601_utc
 from app.db.session import get_db
 from app.models.datasource import DataSource
 from app.models.system_setting import SystemSetting
@@ -114,9 +115,9 @@ def serialize_collector(datasource: DataSource) -> dict:
         "frequency_minutes": datasource.frequency_minutes,
         "frequency": format_frequency_label(datasource.frequency_minutes),
         "is_active": datasource.is_active,
-        "last_run_at": datasource.last_run_at.isoformat() if datasource.last_run_at else None,
+        "last_run_at": to_iso8601_utc(datasource.last_run_at),
         "last_status": datasource.last_status,
-        "next_run_at": datasource.next_run_at.isoformat() if datasource.next_run_at else None,
+        "next_run_at": to_iso8601_utc(datasource.next_run_at),
     }


@@ -216,5 +217,5 @@ async def get_all_settings(
         "notifications": await get_setting_payload(db, "notifications"),
         "security": await get_setting_payload(db, "security"),
         "collectors": [serialize_collector(datasource) for datasource in datasources],
-        "generated_at": datetime.utcnow().isoformat() + "Z",
+        "generated_at": to_iso8601_utc(datetime.now(UTC)),
     }

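The generated_at change also fixes a formatting inconsistency: the old code hand-appended a literal "Z" to a naive timestamp, while other fields used bare isoformat() with no offset at all. With the helper, every timestamp carries an explicit offset:

    from datetime import UTC, datetime

    datetime.utcnow().isoformat() + "Z"   # '2026-01-01T00:00:00Z'      (naive + manual suffix)
    datetime.now(UTC).isoformat()         # '2026-01-01T00:00:00+00:00' (aware, explicit offset)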
@@ -1,4 +1,4 @@
-from datetime import datetime
+from datetime import UTC, datetime
 from typing import Optional

 from fastapi import APIRouter, Depends, HTTPException, status
@@ -8,6 +8,7 @@ from sqlalchemy import text
 from app.db.session import get_db
 from app.models.user import User
 from app.core.security import get_current_user
+from app.core.time import to_iso8601_utc
 from app.services.collectors.registry import collector_registry


@@ -61,8 +62,8 @@ async def list_tasks(
             "datasource_id": t[1],
             "datasource_name": t[2],
             "status": t[3],
-            "started_at": t[4].isoformat() if t[4] else None,
-            "completed_at": t[5].isoformat() if t[5] else None,
+            "started_at": to_iso8601_utc(t[4]),
+            "completed_at": to_iso8601_utc(t[5]),
             "records_processed": t[6],
             "error_message": t[7],
         }
@@ -100,8 +101,8 @@ async def get_task(
             "datasource_id": task[1],
             "datasource_name": task[2],
             "status": task[3],
-            "started_at": task[4].isoformat() if task[4] else None,
-            "completed_at": task[5].isoformat() if task[5] else None,
+            "started_at": to_iso8601_utc(task[4]),
+            "completed_at": to_iso8601_utc(task[5]),
             "records_processed": task[6],
             "error_message": task[7],
         }
@@ -147,8 +148,8 @@ async def trigger_collection(
             "status": result.get("status", "unknown"),
             "records_processed": result.get("records_processed", 0),
             "error_message": result.get("error"),
-            "started_at": datetime.utcnow(),
-            "completed_at": datetime.utcnow(),
+            "started_at": datetime.now(UTC),
+            "completed_at": datetime.now(UTC),
         },
     )

@@ -4,7 +4,7 @@ Unified API for all visualization data sources.
 Returns GeoJSON format compatible with Three.js, CesiumJS, and Unreal Cesium.
 """

-from datetime import datetime
+from datetime import UTC, datetime
 from fastapi import APIRouter, HTTPException, Depends, Query
 from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy import select, func
@@ -12,9 +12,12 @@ from typing import List, Dict, Any, Optional

 from app.core.collected_data_fields import get_record_field
 from app.core.satellite_tle import build_tle_lines_from_elements
+from app.core.time import to_iso8601_utc
 from app.db.session import get_db
+from app.models.bgp_anomaly import BGPAnomaly
 from app.models.collected_data import CollectedData
 from app.services.cable_graph import build_graph_from_data, CableGraph
+from app.services.collectors.bgp_common import RIPE_RIS_COLLECTOR_COORDS

 router = APIRouter()
@@ -273,6 +276,58 @@ def convert_gpu_cluster_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
     return {"type": "FeatureCollection", "features": features}


+def convert_bgp_anomalies_to_geojson(records: List[BGPAnomaly]) -> Dict[str, Any]:
+    features = []
+
+    for record in records:
+        evidence = record.evidence or {}
+        collectors = evidence.get("collectors") or record.peer_scope or []
+        collector = collectors[0] if collectors else None
+        location = None
+        if collector:
+            location = RIPE_RIS_COLLECTOR_COORDS.get(str(collector))
+
+        if location is None:
+            nested = evidence.get("events") or []
+            for item in nested:
+                collector_name = (item or {}).get("collector")
+                if collector_name and collector_name in RIPE_RIS_COLLECTOR_COORDS:
+                    location = RIPE_RIS_COLLECTOR_COORDS[collector_name]
+                    collector = collector_name
+                    break
+
+        if location is None:
+            continue
+
+        features.append(
+            {
+                "type": "Feature",
+                "geometry": {
+                    "type": "Point",
+                    "coordinates": [location["longitude"], location["latitude"]],
+                },
+                "properties": {
+                    "id": record.id,
+                    "collector": collector,
+                    "city": location.get("city"),
+                    "country": location.get("country"),
+                    "source": record.source,
+                    "anomaly_type": record.anomaly_type,
+                    "severity": record.severity,
+                    "status": record.status,
+                    "prefix": record.prefix,
+                    "origin_asn": record.origin_asn,
+                    "new_origin_asn": record.new_origin_asn,
+                    "confidence": record.confidence,
+                    "summary": record.summary,
+                    "created_at": to_iso8601_utc(record.created_at),
+                },
+            }
+        )
+
+    return {"type": "FeatureCollection", "features": features}


 # ============== API Endpoints ==============

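Each anomaly that can be geolocated to a known RIS collector becomes a GeoJSON Point feature; anomalies with no resolvable collector are silently skipped. An illustrative feature (all values invented; rrc00 is the RIPE RIS multihop collector in Amsterdam):

    {
        "type": "Feature",
        "geometry": {"type": "Point", "coordinates": [4.9, 52.37]},
        "properties": {
            "id": 7,
            "collector": "rrc00",
            "city": "Amsterdam",
            "country": "NL",
            "anomaly_type": "origin_change",
            "severity": "high",
            "status": "active",
            "prefix": "192.0.2.0/24",
            "origin_asn": 64500,
            "new_origin_asn": 64501,
            "created_at": "2026-01-01T00:00:00+00:00",
        },
    }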
@@ -479,6 +534,25 @@ async def get_gpu_clusters_geojson(
     }


+@router.get("/geo/bgp-anomalies")
+async def get_bgp_anomalies_geojson(
+    severity: Optional[str] = Query(None),
+    status: Optional[str] = Query("active"),
+    limit: int = Query(200, ge=1, le=1000),
+    db: AsyncSession = Depends(get_db),
+):
+    stmt = select(BGPAnomaly).order_by(BGPAnomaly.created_at.desc()).limit(limit)
+    if severity:
+        stmt = stmt.where(BGPAnomaly.severity == severity)
+    if status:
+        stmt = stmt.where(BGPAnomaly.status == status)
+
+    result = await db.execute(stmt)
+    records = list(result.scalars().all())
+    geojson = convert_bgp_anomalies_to_geojson(records)
+    return {**geojson, "count": len(geojson.get("features", []))}
+
+
 @router.get("/all")
 async def get_all_visualization_data(db: AsyncSession = Depends(get_db)):
     """Unified endpoint that returns all visualization data
@@ -549,7 +623,7 @@ async def get_all_visualization_data(db: AsyncSession = Depends(get_db)):
     )

     return {
-        "generated_at": datetime.utcnow().isoformat() + "Z",
+        "generated_at": to_iso8601_utc(datetime.now(UTC)),
         "version": "1.0",
         "data": {
             "satellites": satellites,
@@ -3,13 +3,14 @@
 import asyncio
 import json
 import logging
-from datetime import datetime
+from datetime import UTC, datetime
 from typing import Optional

 from fastapi import APIRouter, WebSocket, WebSocketDisconnect, Query
 from jose import jwt, JWTError

 from app.core.config import settings
+from app.core.time import to_iso8601_utc
 from app.core.websocket.manager import manager

 logger = logging.getLogger(__name__)
@@ -59,6 +60,7 @@ async def websocket_endpoint(
                 "ixp_nodes",
                 "alerts",
                 "dashboard",
+                "datasource_tasks",
             ],
         },
     }
@@ -72,7 +74,7 @@ async def websocket_endpoint(
             await websocket.send_json(
                 {
                     "type": "heartbeat",
-                    "data": {"action": "pong", "timestamp": datetime.utcnow().isoformat()},
+                    "data": {"action": "pong", "timestamp": to_iso8601_utc(datetime.now(UTC))},
                 }
             )
         elif data.get("type") == "subscribe":
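A minimal heartbeat round-trip from a client (sketch only; the WebSocket path, token transport, and exact request payload are assumptions inferred from the Query import and the elif chain above):

    import asyncio
    import json

    import websockets  # assumption: any asyncio WebSocket client library works

    async def heartbeat(url: str) -> None:
        async with websockets.connect(url) as ws:
            await ws.send(json.dumps({"type": "heartbeat", "data": {"action": "ping"}}))
            while True:  # skip the server's initial hello frame, if any
                reply = json.loads(await ws.recv())
                if reply.get("type") == "heartbeat":
                    print(reply["data"]["timestamp"])  # ISO 8601 with +00:00 offset
                    break

    asyncio.run(heartbeat("ws://localhost:8000/ws?token=..."))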