feat: add BGP observability and admin UI improvements
This commit is contained in:
@@ -23,6 +23,8 @@ COLLECTOR_URL_KEYS = {
|
||||
"top500": "top500.url",
|
||||
"epoch_ai_gpu": "epoch_ai.gpu_clusters_url",
|
||||
"spacetrack_tle": "spacetrack.tle_query_url",
|
||||
"ris_live_bgp": "ris_live.url",
|
||||
"bgpstream_bgp": "bgpstream.url",
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -37,3 +37,9 @@ epoch_ai:
|
||||
spacetrack:
|
||||
base_url: "https://www.space-track.org"
|
||||
tle_query_url: "https://www.space-track.org/basicspacedata/query/class/gp/orderby/EPOCH%20desc/limit/1000/format/json"
|
||||
|
||||
ris_live:
|
||||
url: "https://ris-live.ripe.net/v1/stream/?format=json&client=planet-ris-live"
|
||||
|
||||
bgpstream:
|
||||
url: "https://broker.bgpstream.caida.org/v2"
|
||||
|
||||
@@ -120,6 +120,20 @@ DEFAULT_DATASOURCES = {
|
||||
"priority": "P2",
|
||||
"frequency_minutes": 1440,
|
||||
},
|
||||
"ris_live_bgp": {
|
||||
"id": 21,
|
||||
"name": "RIPE RIS Live BGP",
|
||||
"module": "L3",
|
||||
"priority": "P1",
|
||||
"frequency_minutes": 15,
|
||||
},
|
||||
"bgpstream_bgp": {
|
||||
"id": 22,
|
||||
"name": "CAIDA BGPStream Backfill",
|
||||
"module": "L3",
|
||||
"priority": "P1",
|
||||
"frequency_minutes": 360,
|
||||
},
|
||||
}
|
||||
|
||||
# Reverse lookup table: collector numeric id -> collector name key.
ID_TO_COLLECTOR = {info["id"]: name for name, info in DEFAULT_DATASOURCES.items()}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from datetime import datetime, timedelta
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from typing import Optional
|
||||
|
||||
import bcrypt
|
||||
@@ -49,9 +49,9 @@ def get_password_hash(password: str) -> str:
|
||||
def create_access_token(data: dict, expires_delta: Optional[timedelta] = None) -> str:
|
||||
to_encode = data.copy()
|
||||
if expires_delta:
|
||||
expire = datetime.utcnow() + expires_delta
|
||||
expire = datetime.now(UTC) + expires_delta
|
||||
elif settings.ACCESS_TOKEN_EXPIRE_MINUTES > 0:
|
||||
expire = datetime.utcnow() + timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
|
||||
expire = datetime.now(UTC) + timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
|
||||
else:
|
||||
expire = None
|
||||
if expire:
|
||||
@@ -65,7 +65,7 @@ def create_access_token(data: dict, expires_delta: Optional[timedelta] = None) -
|
||||
def create_refresh_token(data: dict) -> str:
|
||||
to_encode = data.copy()
|
||||
if settings.REFRESH_TOKEN_EXPIRE_DAYS > 0:
|
||||
expire = datetime.utcnow() + timedelta(days=settings.REFRESH_TOKEN_EXPIRE_DAYS)
|
||||
expire = datetime.now(UTC) + timedelta(days=settings.REFRESH_TOKEN_EXPIRE_DAYS)
|
||||
to_encode.update({"exp": expire})
|
||||
to_encode.update({"type": "refresh"})
|
||||
if "sub" in to_encode:
|
||||
|
||||
20
backend/app/core/time.py
Normal file
20
backend/app/core/time.py
Normal file
@@ -0,0 +1,20 @@
|
||||
"""Time helpers for API serialization."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import UTC, datetime
|
||||
|
||||
|
||||
def ensure_utc(value: datetime | None) -> datetime | None:
|
||||
if value is None:
|
||||
return None
|
||||
if value.tzinfo is None:
|
||||
return value.replace(tzinfo=UTC)
|
||||
return value.astimezone(UTC)
|
||||
|
||||
|
||||
def to_iso8601_utc(value: datetime | None) -> str | None:
|
||||
normalized = ensure_utc(value)
|
||||
if normalized is None:
|
||||
return None
|
||||
return normalized.isoformat().replace("+00:00", "Z")
|
||||
@@ -1,9 +1,10 @@
|
||||
"""Data broadcaster for WebSocket connections"""
|
||||
|
||||
import asyncio
|
||||
from datetime import datetime
|
||||
from datetime import UTC, datetime
|
||||
from typing import Dict, Any, Optional
|
||||
|
||||
from app.core.time import to_iso8601_utc
|
||||
from app.core.websocket.manager import manager
|
||||
|
||||
|
||||
@@ -22,7 +23,7 @@ class DataBroadcaster:
|
||||
"active_datasources": 8,
|
||||
"tasks_today": 45,
|
||||
"success_rate": 97.8,
|
||||
"last_updated": datetime.utcnow().isoformat(),
|
||||
"last_updated": to_iso8601_utc(datetime.now(UTC)),
|
||||
"alerts": {"critical": 0, "warning": 2, "info": 5},
|
||||
}
|
||||
|
||||
@@ -35,7 +36,7 @@ class DataBroadcaster:
|
||||
{
|
||||
"type": "data_frame",
|
||||
"channel": "dashboard",
|
||||
"timestamp": datetime.utcnow().isoformat(),
|
||||
"timestamp": to_iso8601_utc(datetime.now(UTC)),
|
||||
"payload": {"stats": stats},
|
||||
},
|
||||
channel="dashboard",
|
||||
@@ -49,7 +50,7 @@ class DataBroadcaster:
|
||||
await manager.broadcast(
|
||||
{
|
||||
"type": "alert_notification",
|
||||
"timestamp": datetime.utcnow().isoformat(),
|
||||
"timestamp": to_iso8601_utc(datetime.now(UTC)),
|
||||
"data": {"alert": alert},
|
||||
}
|
||||
)
|
||||
@@ -60,7 +61,7 @@ class DataBroadcaster:
|
||||
{
|
||||
"type": "data_frame",
|
||||
"channel": "gpu_clusters",
|
||||
"timestamp": datetime.utcnow().isoformat(),
|
||||
"timestamp": to_iso8601_utc(datetime.now(UTC)),
|
||||
"payload": data,
|
||||
}
|
||||
)
|
||||
@@ -71,12 +72,24 @@ class DataBroadcaster:
|
||||
{
|
||||
"type": "data_frame",
|
||||
"channel": channel,
|
||||
"timestamp": datetime.utcnow().isoformat(),
|
||||
"timestamp": to_iso8601_utc(datetime.now(UTC)),
|
||||
"payload": data,
|
||||
},
|
||||
channel=channel if channel in manager.active_connections else "all",
|
||||
)
|
||||
|
||||
async def broadcast_datasource_task_update(self, data: Dict[str, Any]):
    """Broadcast datasource task progress updates to connected clients."""
    frame = {
        "type": "data_frame",
        "channel": "datasource_tasks",
        "timestamp": to_iso8601_utc(datetime.now(UTC)),
        "payload": data,
    }
    # Fan out on the catch-all channel so every listener sees task progress.
    await manager.broadcast(frame, channel="all")
|
||||
|
||||
def start(self):
|
||||
"""Start all broadcasters"""
|
||||
if not self.running:
|
||||
|
||||
Reference in New Issue
Block a user