feat: add bgp observability and admin ui improvements
This commit is contained in:
@@ -1,9 +1,12 @@
|
||||
import asyncio
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from sqlalchemy import func, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.core.time import to_iso8601_utc
|
||||
from app.core.security import get_current_user
|
||||
from app.core.data_sources import get_data_sources_config
|
||||
from app.db.session import get_db
|
||||
@@ -24,6 +27,12 @@ def format_frequency_label(minutes: int) -> str:
|
||||
return f"{minutes}m"
|
||||
|
||||
|
||||
def is_due_for_collection(datasource: DataSource, now: datetime) -> bool:
    """Decide whether *datasource* should be collected again at *now*.

    A source with no recorded run is always due.  Otherwise it becomes due
    once ``frequency_minutes`` have elapsed since ``last_run_at`` (the
    boundary instant itself counts as due).

    NOTE(review): assumes ``last_run_at`` carries the same tz-awareness as
    *now* (callers pass an aware UTC datetime) — confirm against the DB model.
    """
    last_run = datasource.last_run_at
    if last_run is None:
        return True
    next_due = last_run + timedelta(minutes=datasource.frequency_minutes)
    return next_due <= now
|
||||
|
||||
|
||||
async def get_datasource_record(db: AsyncSession, source_id: str) -> Optional[DataSource]:
|
||||
datasource = None
|
||||
try:
|
||||
@@ -47,6 +56,7 @@ async def get_last_completed_task(db: AsyncSession, datasource_id: int) -> Optio
|
||||
select(CollectionTask)
|
||||
.where(CollectionTask.datasource_id == datasource_id)
|
||||
.where(CollectionTask.completed_at.isnot(None))
|
||||
.where(CollectionTask.status.in_(("success", "failed", "cancelled")))
|
||||
.order_by(CollectionTask.completed_at.desc())
|
||||
.limit(1)
|
||||
)
|
||||
@@ -94,9 +104,9 @@ async def list_datasources(
|
||||
)
|
||||
data_count = data_count_result.scalar() or 0
|
||||
|
||||
last_run = None
|
||||
if last_task and last_task.completed_at and data_count > 0:
|
||||
last_run = last_task.completed_at.strftime("%Y-%m-%d %H:%M")
|
||||
last_run_at = datasource.last_run_at or (last_task.completed_at if last_task else None)
|
||||
last_run = to_iso8601_utc(last_run_at)
|
||||
last_status = datasource.last_status or (last_task.status if last_task else None)
|
||||
|
||||
collector_list.append(
|
||||
{
|
||||
@@ -110,6 +120,10 @@ async def list_datasources(
|
||||
"collector_class": datasource.collector_class,
|
||||
"endpoint": endpoint,
|
||||
"last_run": last_run,
|
||||
"last_run_at": to_iso8601_utc(last_run_at),
|
||||
"last_status": last_status,
|
||||
"last_records_processed": last_task.records_processed if last_task else None,
|
||||
"data_count": data_count,
|
||||
"is_running": running_task is not None,
|
||||
"task_id": running_task.id if running_task else None,
|
||||
"progress": running_task.progress if running_task else None,
|
||||
@@ -122,6 +136,105 @@ async def list_datasources(
|
||||
return {"total": len(collector_list), "data": collector_list}
|
||||
|
||||
|
||||
@router.post("/trigger-all")
async def trigger_all_datasources(
    force: bool = Query(False),
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Trigger collection for every active data source.

    Sources that already have a running task, or that are still inside their
    frequency window, are skipped (``force`` bypasses only the window check,
    never the already-running check).  After firing the collectors, the
    handler polls briefly so the response can report the task id each
    collector spawned.

    Returns a summary dict with ``triggered``/``skipped``/``failed`` entries,
    each carrying the source's id, source key, and display name.
    """
    rows = await db.execute(
        select(DataSource)
        .where(DataSource.is_active == True)  # SQLAlchemy column comparison; `is True` would not build SQL
        .order_by(DataSource.module, DataSource.id)
    )
    active_sources = rows.scalars().all()

    if not active_sources:
        return {
            "status": "noop",
            "message": "No active data sources to trigger",
            "triggered": [],
            "skipped": [],
            "failed": [],
        }

    # Task id seen for each source *before* triggering, so the poll below can
    # tell a freshly spawned task apart from a stale one.
    baseline_task_ids: dict[int, Optional[int]] = {}
    triggered: list[dict] = []
    skipped: list[dict] = []
    failed: list[dict] = []
    now = datetime.now(timezone.utc)

    for ds in active_sources:
        identity = {"id": ds.id, "source": ds.source, "name": ds.name}

        active_task = await get_running_task(db, ds.id)
        if active_task is not None:
            skipped.append(
                {**identity, "reason": "already_running", "task_id": active_task.id}
            )
            continue

        # De Morgan of `not force and not is_due(...)`; short-circuits the
        # due-check when force is set, exactly like the original condition.
        if not (force or is_due_for_collection(ds, now)):
            due_at = ds.last_run_at + timedelta(minutes=ds.frequency_minutes)
            skipped.append(
                {
                    **identity,
                    "reason": "within_frequency_window",
                    "last_run_at": to_iso8601_utc(ds.last_run_at),
                    "next_run_at": to_iso8601_utc(due_at),
                }
            )
            continue

        baseline_task_ids[ds.id] = await get_latest_task_id_for_datasource(ds.id)
        if not run_collector_now(ds.source):
            failed.append({**identity, "reason": "trigger_failed"})
            continue

        triggered.append({**identity, "task_id": None})

    # Poll up to ~2 s (20 × 0.1 s) to resolve the task ids the collectors
    # created; any entry still unresolved keeps task_id = None.
    for _ in range(20):
        await asyncio.sleep(0.1)
        unresolved = [entry for entry in triggered if entry["task_id"] is None]
        if not unresolved:
            break
        for entry in unresolved:
            latest = await get_latest_task_id_for_datasource(entry["id"])
            if latest is not None and latest != baseline_task_ids.get(entry["id"]):
                entry["task_id"] = latest

    return {
        "status": "triggered" if triggered else "partial",
        "message": f"Triggered {len(triggered)} data sources",
        "force": force,
        "triggered": triggered,
        "skipped": skipped,
        "failed": failed,
    }
|
||||
|
||||
|
||||
@router.get("/{source_id}")
|
||||
async def get_datasource(
|
||||
source_id: str,
|
||||
@@ -217,15 +330,19 @@ async def trigger_datasource(
|
||||
if not datasource.is_active:
|
||||
raise HTTPException(status_code=400, detail="Data source is disabled")
|
||||
|
||||
previous_task_id = await get_latest_task_id_for_datasource(datasource.id)
|
||||
success = run_collector_now(datasource.source)
|
||||
if not success:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to trigger collector '{datasource.source}'")
|
||||
|
||||
task_id = None
|
||||
for _ in range(10):
|
||||
for _ in range(20):
|
||||
await asyncio.sleep(0.1)
|
||||
task_id = await get_latest_task_id_for_datasource(datasource.id)
|
||||
if task_id is not None:
|
||||
if task_id is not None and task_id != previous_task_id:
|
||||
break
|
||||
if task_id == previous_task_id:
|
||||
task_id = None
|
||||
|
||||
return {
|
||||
"status": "triggered",
|
||||
|
||||
Reference in New Issue
Block a user