feat: persist system settings and refine admin layouts
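Replace the hardcoded COLLECTOR_INFO tables and module-global settings dicts
with database-backed records: datasource rows are seeded from a new
app/core/datasource_defaults.py at startup, admin settings persist in a new
system_settings table, and the APScheduler jobs are derived from, and kept in
sync with, the datasource table.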
@@ -1,155 +1,66 @@
-from typing import List, Optional
-from datetime import datetime
-from fastapi import APIRouter, Depends, HTTPException, status
-from sqlalchemy import select, func
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from app.db.session import get_db
-from app.models.user import User
-from app.models.task import CollectionTask
-from app.models.collected_data import CollectedData
-from app.models.datasource import DataSource
-from app.core.security import get_current_user
-from app.services.collectors.registry import collector_registry
+from typing import Optional
+
+from fastapi import APIRouter, Depends, HTTPException
+from sqlalchemy import func, select
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from app.core.security import get_current_user
+from app.db.session import get_db
+from app.models.collected_data import CollectedData
+from app.models.datasource import DataSource
+from app.models.task import CollectionTask
+from app.models.user import User
+from app.services.scheduler import run_collector_now, sync_datasource_job
 
 router = APIRouter()
 
-COLLECTOR_INFO = {
-    "top500": {
-        "id": 1,
-        "name": "TOP500 Supercomputers",
-        "module": "L1",
-        "priority": "P0",
-        "frequency_hours": 4,
-    },
-    "epoch_ai_gpu": {
-        "id": 2,
-        "name": "Epoch AI GPU Clusters",
-        "module": "L1",
-        "priority": "P0",
-        "frequency_hours": 6,
-    },
-    "huggingface_models": {
-        "id": 3,
-        "name": "HuggingFace Models",
-        "module": "L2",
-        "priority": "P1",
-        "frequency_hours": 12,
-    },
-    "huggingface_datasets": {
-        "id": 4,
-        "name": "HuggingFace Datasets",
-        "module": "L2",
-        "priority": "P1",
-        "frequency_hours": 12,
-    },
-    "huggingface_spaces": {
-        "id": 5,
-        "name": "HuggingFace Spaces",
-        "module": "L2",
-        "priority": "P2",
-        "frequency_hours": 24,
-    },
-    "peeringdb_ixp": {
-        "id": 6,
-        "name": "PeeringDB IXP",
-        "module": "L2",
-        "priority": "P1",
-        "frequency_hours": 24,
-    },
-    "peeringdb_network": {
-        "id": 7,
-        "name": "PeeringDB Networks",
-        "module": "L2",
-        "priority": "P2",
-        "frequency_hours": 48,
-    },
-    "peeringdb_facility": {
-        "id": 8,
-        "name": "PeeringDB Facilities",
-        "module": "L2",
-        "priority": "P2",
-        "frequency_hours": 48,
-    },
-    "telegeography_cables": {
-        "id": 9,
-        "name": "Submarine Cables",
-        "module": "L2",
-        "priority": "P1",
-        "frequency_hours": 168,
-    },
-    "telegeography_landing": {
-        "id": 10,
-        "name": "Cable Landing Points",
-        "module": "L2",
-        "priority": "P2",
-        "frequency_hours": 168,
-    },
-    "telegeography_systems": {
-        "id": 11,
-        "name": "Cable Systems",
-        "module": "L2",
-        "priority": "P2",
-        "frequency_hours": 168,
-    },
-    "arcgis_cables": {
-        "id": 15,
-        "name": "ArcGIS Submarine Cables",
-        "module": "L2",
-        "priority": "P1",
-        "frequency_hours": 168,
-    },
-    "arcgis_landing_points": {
-        "id": 16,
-        "name": "ArcGIS Landing Points",
-        "module": "L2",
-        "priority": "P1",
-        "frequency_hours": 168,
-    },
-    "arcgis_cable_landing_relation": {
-        "id": 17,
-        "name": "ArcGIS Cable-Landing Relations",
-        "module": "L2",
-        "priority": "P1",
-        "frequency_hours": 168,
-    },
-    "fao_landing_points": {
-        "id": 18,
-        "name": "FAO Landing Points",
-        "module": "L2",
-        "priority": "P1",
-        "frequency_hours": 168,
-    },
-    "spacetrack_tle": {
-        "id": 19,
-        "name": "Space-Track TLE",
-        "module": "L3",
-        "priority": "P2",
-        "frequency_hours": 24,
-    },
-    "celestrak_tle": {
-        "id": 20,
-        "name": "CelesTrak TLE",
-        "module": "L3",
-        "priority": "P2",
-        "frequency_hours": 24,
-    },
-}
-
-ID_TO_COLLECTOR = {info["id"]: name for name, info in COLLECTOR_INFO.items()}
-COLLECTOR_TO_ID = {name: info["id"] for name, info in COLLECTOR_INFO.items()}
-
-
-def get_collector_name(source_id: str) -> Optional[str]:
-    try:
-        numeric_id = int(source_id)
-        if numeric_id in ID_TO_COLLECTOR:
-            return ID_TO_COLLECTOR[numeric_id]
-    except ValueError:
-        pass
-    if source_id in COLLECTOR_INFO:
-        return source_id
-    return None
+
+def format_frequency_label(minutes: int) -> str:
+    if minutes % 1440 == 0:
+        return f"{minutes // 1440}d"
+    if minutes % 60 == 0:
+        return f"{minutes // 60}h"
+    return f"{minutes}m"
+
+
+async def get_datasource_record(db: AsyncSession, source_id: str) -> Optional[DataSource]:
+    datasource = None
+    try:
+        datasource = await db.get(DataSource, int(source_id))
+    except ValueError:
+        pass
+
+    if datasource is not None:
+        return datasource
+
+    result = await db.execute(
+        select(DataSource).where(
+            (DataSource.source == source_id) | (DataSource.collector_class == source_id)
+        )
+    )
+    return result.scalar_one_or_none()
+
+
+async def get_last_completed_task(db: AsyncSession, datasource_id: int) -> Optional[CollectionTask]:
+    result = await db.execute(
+        select(CollectionTask)
+        .where(CollectionTask.datasource_id == datasource_id)
+        .where(CollectionTask.completed_at.isnot(None))
+        .order_by(CollectionTask.completed_at.desc())
+        .limit(1)
+    )
+    return result.scalar_one_or_none()
+
+
+async def get_running_task(db: AsyncSession, datasource_id: int) -> Optional[CollectionTask]:
+    result = await db.execute(
+        select(CollectionTask)
+        .where(CollectionTask.datasource_id == datasource_id)
+        .where(CollectionTask.status == "running")
+        .order_by(CollectionTask.started_at.desc())
+        .limit(1)
+    )
+    return result.scalar_one_or_none()
 
 
 @router.get("")
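A quick sketch of the new helpers, assuming they behave exactly as defined in the hunk above: get_datasource_record resolves a numeric primary key, a source key, or a collector class name, and format_frequency_label renders minutes as the largest whole unit.

# format_frequency_label prefers days, then hours, then raw minutes
assert format_frequency_label(10080) == "7d"  # 10080 == 7 * 1440
assert format_frequency_label(240) == "4h"
assert format_frequency_label(90) == "90m"    # not a whole number of hours

# any of these resolve to the same seeded row:
# await get_datasource_record(db, "1")
# await get_datasource_record(db, "top500")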
@@ -160,48 +71,24 @@ async def list_datasources(
     current_user: User = Depends(get_current_user),
     db: AsyncSession = Depends(get_db),
 ):
-    query = select(DataSource)
-
-    filters = []
+    query = select(DataSource).order_by(DataSource.module, DataSource.id)
     if module:
-        filters.append(DataSource.module == module)
+        query = query.where(DataSource.module == module)
     if is_active is not None:
-        filters.append(DataSource.is_active == is_active)
+        query = query.where(DataSource.is_active == is_active)
     if priority:
-        filters.append(DataSource.priority == priority)
-
-    if filters:
-        query = query.where(*filters)
+        query = query.where(DataSource.priority == priority)
 
     result = await db.execute(query)
     datasources = result.scalars().all()
 
     collector_list = []
-    for name, info in COLLECTOR_INFO.items():
-        is_active_status = collector_registry.is_active(name)
-
-        running_task_query = (
-            select(CollectionTask)
-            .where(CollectionTask.datasource_id == info["id"])
-            .where(CollectionTask.status == "running")
-            .order_by(CollectionTask.started_at.desc())
-            .limit(1)
-        )
-        running_result = await db.execute(running_task_query)
-        running_task = running_result.scalar_one_or_none()
-
-        last_run_query = (
-            select(CollectionTask)
-            .where(CollectionTask.datasource_id == info["id"])
-            .where(CollectionTask.completed_at.isnot(None))
-            .order_by(CollectionTask.completed_at.desc())
-            .limit(1)
-        )
-        last_run_result = await db.execute(last_run_query)
-        last_task = last_run_result.scalar_one_or_none()
-
-        data_count_query = select(func.count(CollectedData.id)).where(CollectedData.source == name)
-        data_count_result = await db.execute(data_count_query)
+    for datasource in datasources:
+        running_task = await get_running_task(db, datasource.id)
+        last_task = await get_last_completed_task(db, datasource.id)
+        data_count_result = await db.execute(
+            select(func.count(CollectedData.id)).where(CollectedData.source == datasource.source)
+        )
         data_count = data_count_result.scalar() or 0
 
         last_run = None
@@ -210,13 +97,14 @@ async def list_datasources(
 
         collector_list.append(
             {
-                "id": info["id"],
-                "name": info["name"],
-                "module": info["module"],
-                "priority": info["priority"],
-                "frequency": f"{info['frequency_hours']}h",
-                "is_active": is_active_status,
-                "collector_class": name,
+                "id": datasource.id,
+                "name": datasource.name,
+                "module": datasource.module,
+                "priority": datasource.priority,
+                "frequency": format_frequency_label(datasource.frequency_minutes),
+                "frequency_minutes": datasource.frequency_minutes,
+                "is_active": datasource.is_active,
+                "collector_class": datasource.collector_class,
                 "last_run": last_run,
                 "is_running": running_task is not None,
                 "task_id": running_task.id if running_task else None,
@@ -226,15 +114,7 @@ async def list_datasources(
             }
         )
 
-    if module:
-        collector_list = [c for c in collector_list if c["module"] == module]
-    if priority:
-        collector_list = [c for c in collector_list if c["priority"] == priority]
-
-    return {
-        "total": len(collector_list),
-        "data": collector_list,
-    }
+    return {"total": len(collector_list), "data": collector_list}
 
 
 @router.get("/{source_id}")
@@ -243,19 +123,20 @@ async def get_datasource(
     current_user: User = Depends(get_current_user),
     db: AsyncSession = Depends(get_db),
 ):
-    collector_name = get_collector_name(source_id)
-    if not collector_name:
+    datasource = await get_datasource_record(db, source_id)
+    if not datasource:
         raise HTTPException(status_code=404, detail="Data source not found")
 
-    info = COLLECTOR_INFO[collector_name]
     return {
-        "id": info["id"],
-        "name": info["name"],
-        "module": info["module"],
-        "priority": info["priority"],
-        "frequency": f"{info['frequency_hours']}h",
-        "collector_class": collector_name,
-        "is_active": collector_registry.is_active(collector_name),
+        "id": datasource.id,
+        "name": datasource.name,
+        "module": datasource.module,
+        "priority": datasource.priority,
+        "frequency": format_frequency_label(datasource.frequency_minutes),
+        "frequency_minutes": datasource.frequency_minutes,
+        "collector_class": datasource.collector_class,
+        "source": datasource.source,
+        "is_active": datasource.is_active,
     }
@@ -263,24 +144,32 @@ async def get_datasource(
 async def enable_datasource(
     source_id: str,
     current_user: User = Depends(get_current_user),
     db: AsyncSession = Depends(get_db),
 ):
-    collector_name = get_collector_name(source_id)
-    if not collector_name:
+    datasource = await get_datasource_record(db, source_id)
+    if not datasource:
         raise HTTPException(status_code=404, detail="Data source not found")
-    collector_registry.set_active(collector_name, True)
-    return {"status": "enabled", "source_id": source_id}
+
+    datasource.is_active = True
+    await db.commit()
+    await sync_datasource_job(datasource.id)
+    return {"status": "enabled", "source_id": datasource.id}
 
 
 @router.post("/{source_id}/disable")
 async def disable_datasource(
     source_id: str,
     current_user: User = Depends(get_current_user),
     db: AsyncSession = Depends(get_db),
 ):
-    collector_name = get_collector_name(source_id)
-    if not collector_name:
+    datasource = await get_datasource_record(db, source_id)
+    if not datasource:
         raise HTTPException(status_code=404, detail="Data source not found")
-    collector_registry.set_active(collector_name, False)
-    return {"status": "disabled", "source_id": source_id}
+
+    datasource.is_active = False
+    await db.commit()
+    await sync_datasource_job(datasource.id)
+    return {"status": "disabled", "source_id": datasource.id}
 
 
 @router.get("/{source_id}/stats")
@@ -289,26 +178,19 @@ async def get_datasource_stats(
     current_user: User = Depends(get_current_user),
     db: AsyncSession = Depends(get_db),
 ):
-    collector_name = get_collector_name(source_id)
-    if not collector_name:
+    datasource = await get_datasource_record(db, source_id)
+    if not datasource:
         raise HTTPException(status_code=404, detail="Data source not found")
 
-    info = COLLECTOR_INFO[collector_name]
-    source_name = info["name"]
-
-    query = select(func.count(CollectedData.id)).where(CollectedData.source == collector_name)
-    result = await db.execute(query)
+    result = await db.execute(
+        select(func.count(CollectedData.id)).where(CollectedData.source == datasource.source)
+    )
     total = result.scalar() or 0
 
-    if total == 0:
-        query = select(func.count(CollectedData.id)).where(CollectedData.source == source_name)
-        result = await db.execute(query)
-        total = result.scalar() or 0
-
     return {
-        "source_id": source_id,
-        "collector_name": collector_name,
-        "name": info["name"],
+        "source_id": datasource.id,
+        "collector_name": datasource.collector_class,
+        "name": datasource.name,
         "total_records": total,
     }
@@ -317,30 +199,25 @@ async def get_datasource_stats(
 async def trigger_datasource(
     source_id: str,
     current_user: User = Depends(get_current_user),
     db: AsyncSession = Depends(get_db),
 ):
-    collector_name = get_collector_name(source_id)
-    if not collector_name:
+    datasource = await get_datasource_record(db, source_id)
+    if not datasource:
         raise HTTPException(status_code=404, detail="Data source not found")
 
-    from app.services.scheduler import run_collector_now
-
-    if not collector_registry.is_active(collector_name):
+    if not datasource.is_active:
         raise HTTPException(status_code=400, detail="Data source is disabled")
 
-    success = run_collector_now(collector_name)
+    success = run_collector_now(datasource.source)
+    if not success:
+        raise HTTPException(status_code=500, detail=f"Failed to trigger collector '{datasource.source}'")
 
-    if success:
-        return {
-            "status": "triggered",
-            "source_id": source_id,
-            "collector_name": collector_name,
-            "message": f"Collector '{collector_name}' has been triggered",
-        }
-    else:
-        raise HTTPException(
-            status_code=500,
-            detail=f"Failed to trigger collector '{collector_name}'",
-        )
+    return {
+        "status": "triggered",
+        "source_id": datasource.id,
+        "collector_name": datasource.source,
+        "message": f"Collector '{datasource.source}' has been triggered",
+    }
 
 
 @router.delete("/{source_id}/data")
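For reference, a minimal sketch of driving these endpoints over HTTP. The /api/v1/datasources prefix, host, and token are assumptions; the enable path mirrors the /disable route shown above, and the trigger route's decorator sits outside this hunk, so its path is inferred.

import httpx  # assumed available in the client environment

BASE = "http://localhost:8000/api/v1/datasources"  # assumed mount point
HEADERS = {"Authorization": "Bearer <token>"}      # placeholder token

# numeric id and collector name are interchangeable via get_datasource_record
httpx.post(f"{BASE}/1/enable", headers=HEADERS)
httpx.post(f"{BASE}/top500/trigger", headers=HEADERS)  # hypothetical path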
@@ -349,39 +226,25 @@ async def clear_datasource_data(
     current_user: User = Depends(get_current_user),
     db: AsyncSession = Depends(get_db),
 ):
-    collector_name = get_collector_name(source_id)
-    if not collector_name:
+    datasource = await get_datasource_record(db, source_id)
+    if not datasource:
         raise HTTPException(status_code=404, detail="Data source not found")
 
-    info = COLLECTOR_INFO[collector_name]
-    source_name = info["name"]
-
-    query = select(func.count(CollectedData.id)).where(CollectedData.source == collector_name)
-    result = await db.execute(query)
+    result = await db.execute(
        select(func.count(CollectedData.id)).where(CollectedData.source == datasource.source)
+    )
     count = result.scalar() or 0
 
     if count == 0:
-        query = select(func.count(CollectedData.id)).where(CollectedData.source == source_name)
-        result = await db.execute(query)
-        count = result.scalar() or 0
-        delete_source = source_name
-    else:
-        delete_source = collector_name
-
-    if count == 0:
-        return {
-            "status": "success",
-            "message": "No data to clear",
-            "deleted_count": 0,
-        }
+        return {"status": "success", "message": "No data to clear", "deleted_count": 0}
 
-    delete_query = CollectedData.__table__.delete().where(CollectedData.source == delete_source)
+    delete_query = CollectedData.__table__.delete().where(CollectedData.source == datasource.source)
     await db.execute(delete_query)
     await db.commit()
 
     return {
         "status": "success",
-        "message": f"Cleared {count} records for data source '{info['name']}'",
+        "message": f"Cleared {count} records for data source '{datasource.name}'",
         "deleted_count": count,
     }
@@ -391,22 +254,11 @@ async def get_task_status(
     source_id: str,
     db: AsyncSession = Depends(get_db),
 ):
-    collector_name = get_collector_name(source_id)
-    if not collector_name:
+    datasource = await get_datasource_record(db, source_id)
+    if not datasource:
         raise HTTPException(status_code=404, detail="Data source not found")
 
-    info = COLLECTOR_INFO[collector_name]
-
-    running_task_query = (
-        select(CollectionTask)
-        .where(CollectionTask.datasource_id == info["id"])
-        .where(CollectionTask.status == "running")
-        .order_by(CollectionTask.started_at.desc())
-        .limit(1)
-    )
-    running_result = await db.execute(running_task_query)
-    running_task = running_result.scalar_one_or_none()
-
+    running_task = await get_running_task(db, datasource.id)
     if not running_task:
         return {"is_running": False, "task_id": None, "progress": None}
 
@@ -417,4 +269,4 @@ async def get_task_status(
         "records_processed": running_task.records_processed,
         "total_records": running_task.total_records,
         "status": running_task.status,
-        }
+    }
@@ -1,13 +1,21 @@
 from datetime import datetime
 from typing import Optional
 
 from fastapi import APIRouter, Depends, HTTPException
-from pydantic import BaseModel, EmailStr
-
-from app.models.user import User
+from pydantic import BaseModel, EmailStr, Field
+from sqlalchemy import select
+from sqlalchemy.ext.asyncio import AsyncSession
 
 from app.core.security import get_current_user
+from app.db.session import get_db
+from app.models.datasource import DataSource
+from app.models.system_setting import SystemSetting
+from app.models.user import User
+from app.services.scheduler import sync_datasource_job
 
 router = APIRouter()
 
-default_settings = {
+DEFAULT_SETTINGS = {
     "system": {
         "system_name": "智能星球",
         "refresh_interval": 60,
@@ -29,17 +37,13 @@ default_settings = {
     },
 }
 
-system_settings = default_settings["system"].copy()
-notification_settings = default_settings["notifications"].copy()
-security_settings = default_settings["security"].copy()
-
 
 class SystemSettingsUpdate(BaseModel):
     system_name: str = "智能星球"
-    refresh_interval: int = 60
+    refresh_interval: int = Field(default=60, ge=10, le=3600)
     auto_refresh: bool = True
-    data_retention_days: int = 30
-    max_concurrent_tasks: int = 5
+    data_retention_days: int = Field(default=30, ge=1, le=3650)
+    max_concurrent_tasks: int = Field(default=5, ge=1, le=50)
@@ -51,60 +55,166 @@ class NotificationSettingsUpdate(BaseModel):
 
 
 class SecuritySettingsUpdate(BaseModel):
-    session_timeout: int = 60
-    max_login_attempts: int = 5
-    password_policy: str = "medium"
+    session_timeout: int = Field(default=60, ge=5, le=1440)
+    max_login_attempts: int = Field(default=5, ge=1, le=20)
+    password_policy: str = Field(default="medium")
+
+
+class CollectorSettingsUpdate(BaseModel):
+    is_active: bool
+    priority: str = Field(default="P1")
+    frequency_minutes: int = Field(default=60, ge=1, le=10080)
+
+
+def merge_with_defaults(category: str, payload: Optional[dict]) -> dict:
+    merged = DEFAULT_SETTINGS[category].copy()
+    if payload:
+        merged.update(payload)
+    return merged
+
+
+async def get_setting_record(db: AsyncSession, category: str) -> Optional[SystemSetting]:
+    result = await db.execute(select(SystemSetting).where(SystemSetting.category == category))
+    return result.scalar_one_or_none()
+
+
+async def get_setting_payload(db: AsyncSession, category: str) -> dict:
+    record = await get_setting_record(db, category)
+    return merge_with_defaults(category, record.payload if record else None)
+
+
+async def save_setting_payload(db: AsyncSession, category: str, payload: dict) -> dict:
+    record = await get_setting_record(db, category)
+    if record is None:
+        record = SystemSetting(category=category, payload=payload)
+        db.add(record)
+    else:
+        record.payload = payload
+
+    await db.commit()
+    await db.refresh(record)
+    return merge_with_defaults(category, record.payload)
+
+
+def format_frequency_label(minutes: int) -> str:
+    if minutes % 1440 == 0:
+        return f"{minutes // 1440}d"
+    if minutes % 60 == 0:
+        return f"{minutes // 60}h"
+    return f"{minutes}m"
+
+
+def serialize_collector(datasource: DataSource) -> dict:
+    return {
+        "id": datasource.id,
+        "name": datasource.name,
+        "source": datasource.source,
+        "module": datasource.module,
+        "priority": datasource.priority,
+        "frequency_minutes": datasource.frequency_minutes,
+        "frequency": format_frequency_label(datasource.frequency_minutes),
+        "is_active": datasource.is_active,
+        "last_run_at": datasource.last_run_at.isoformat() if datasource.last_run_at else None,
+        "last_status": datasource.last_status,
+        "next_run_at": datasource.next_run_at.isoformat() if datasource.next_run_at else None,
+    }
 
 
 @router.get("/system")
-async def get_system_settings(current_user: User = Depends(get_current_user)):
-    return {"system": system_settings}
+async def get_system_settings(
+    current_user: User = Depends(get_current_user),
+    db: AsyncSession = Depends(get_db),
+):
+    return {"system": await get_setting_payload(db, "system")}
 
 
 @router.put("/system")
 async def update_system_settings(
     settings: SystemSettingsUpdate,
     current_user: User = Depends(get_current_user),
+    db: AsyncSession = Depends(get_db),
 ):
-    global system_settings
-    system_settings = settings.model_dump()
-    return {"status": "updated", "system": system_settings}
+    payload = await save_setting_payload(db, "system", settings.model_dump())
+    return {"status": "updated", "system": payload}
 
 
 @router.get("/notifications")
-async def get_notification_settings(current_user: User = Depends(get_current_user)):
-    return {"notifications": notification_settings}
+async def get_notification_settings(
+    current_user: User = Depends(get_current_user),
+    db: AsyncSession = Depends(get_db),
+):
+    return {"notifications": await get_setting_payload(db, "notifications")}
 
 
 @router.put("/notifications")
 async def update_notification_settings(
     settings: NotificationSettingsUpdate,
     current_user: User = Depends(get_current_user),
+    db: AsyncSession = Depends(get_db),
 ):
-    global notification_settings
-    notification_settings = settings.model_dump()
-    return {"status": "updated", "notifications": notification_settings}
+    payload = await save_setting_payload(db, "notifications", settings.model_dump())
+    return {"status": "updated", "notifications": payload}
 
 
 @router.get("/security")
-async def get_security_settings(current_user: User = Depends(get_current_user)):
-    return {"security": security_settings}
+async def get_security_settings(
+    current_user: User = Depends(get_current_user),
+    db: AsyncSession = Depends(get_db),
+):
+    return {"security": await get_setting_payload(db, "security")}
 
 
 @router.put("/security")
 async def update_security_settings(
     settings: SecuritySettingsUpdate,
     current_user: User = Depends(get_current_user),
+    db: AsyncSession = Depends(get_db),
 ):
-    global security_settings
-    security_settings = settings.model_dump()
-    return {"status": "updated", "security": security_settings}
+    payload = await save_setting_payload(db, "security", settings.model_dump())
+    return {"status": "updated", "security": payload}
+
+
+@router.get("/collectors")
+async def get_collector_settings(
+    current_user: User = Depends(get_current_user),
+    db: AsyncSession = Depends(get_db),
+):
+    result = await db.execute(select(DataSource).order_by(DataSource.module, DataSource.id))
+    datasources = result.scalars().all()
+    return {"collectors": [serialize_collector(datasource) for datasource in datasources]}
+
+
+@router.put("/collectors/{datasource_id}")
+async def update_collector_settings(
+    datasource_id: int,
+    settings: CollectorSettingsUpdate,
+    current_user: User = Depends(get_current_user),
+    db: AsyncSession = Depends(get_db),
+):
+    datasource = await db.get(DataSource, datasource_id)
+    if not datasource:
+        raise HTTPException(status_code=404, detail="Data source not found")
+
+    datasource.is_active = settings.is_active
+    datasource.priority = settings.priority
+    datasource.frequency_minutes = settings.frequency_minutes
+    await db.commit()
+    await db.refresh(datasource)
+    await sync_datasource_job(datasource.id)
+    return {"status": "updated", "collector": serialize_collector(datasource)}
 
 
 @router.get("")
-async def get_all_settings(current_user: User = Depends(get_current_user)):
+async def get_all_settings(
+    current_user: User = Depends(get_current_user),
+    db: AsyncSession = Depends(get_db),
+):
+    result = await db.execute(select(DataSource).order_by(DataSource.module, DataSource.id))
+    datasources = result.scalars().all()
     return {
-        "system": system_settings,
-        "notifications": notification_settings,
-        "security": security_settings,
-    }
+        "system": await get_setting_payload(db, "system"),
+        "notifications": await get_setting_payload(db, "notifications"),
+        "security": await get_setting_payload(db, "security"),
+        "collectors": [serialize_collector(datasource) for datasource in datasources],
+        "generated_at": datetime.utcnow().isoformat() + "Z",
+    }
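A hedged round-trip sketch of the new persistence: a PUT stores the payload via save_setting_payload, and a later GET merges it over DEFAULT_SETTINGS, so values survive a process restart. The /api/v1/settings prefix, host, and token are assumptions.

import httpx

BASE = "http://localhost:8000/api/v1/settings"  # assumed mount point
HEADERS = {"Authorization": "Bearer <token>"}   # placeholder token

httpx.put(
    f"{BASE}/system",
    headers=HEADERS,
    json={
        "system_name": "智能星球",
        "refresh_interval": 120,    # validated: ge=10, le=3600
        "auto_refresh": True,
        "data_retention_days": 30,  # validated: ge=1, le=3650
        "max_concurrent_tasks": 5,  # validated: ge=1, le=50
    },
)
print(httpx.get(f"{BASE}/system", headers=HEADERS).json())  # reflects the stored payload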
backend/app/core/datasource_defaults.py (new file, 126 lines)
@@ -0,0 +1,126 @@
+"""Default built-in datasource definitions."""
+
+DEFAULT_DATASOURCES = {
+    "top500": {
+        "id": 1,
+        "name": "TOP500 Supercomputers",
+        "module": "L1",
+        "priority": "P0",
+        "frequency_minutes": 240,
+    },
+    "epoch_ai_gpu": {
+        "id": 2,
+        "name": "Epoch AI GPU Clusters",
+        "module": "L1",
+        "priority": "P0",
+        "frequency_minutes": 360,
+    },
+    "huggingface_models": {
+        "id": 3,
+        "name": "HuggingFace Models",
+        "module": "L2",
+        "priority": "P1",
+        "frequency_minutes": 720,
+    },
+    "huggingface_datasets": {
+        "id": 4,
+        "name": "HuggingFace Datasets",
+        "module": "L2",
+        "priority": "P1",
+        "frequency_minutes": 720,
+    },
+    "huggingface_spaces": {
+        "id": 5,
+        "name": "HuggingFace Spaces",
+        "module": "L2",
+        "priority": "P2",
+        "frequency_minutes": 1440,
+    },
+    "peeringdb_ixp": {
+        "id": 6,
+        "name": "PeeringDB IXP",
+        "module": "L2",
+        "priority": "P1",
+        "frequency_minutes": 1440,
+    },
+    "peeringdb_network": {
+        "id": 7,
+        "name": "PeeringDB Networks",
+        "module": "L2",
+        "priority": "P2",
+        "frequency_minutes": 2880,
+    },
+    "peeringdb_facility": {
+        "id": 8,
+        "name": "PeeringDB Facilities",
+        "module": "L2",
+        "priority": "P2",
+        "frequency_minutes": 2880,
+    },
+    "telegeography_cables": {
+        "id": 9,
+        "name": "Submarine Cables",
+        "module": "L2",
+        "priority": "P1",
+        "frequency_minutes": 10080,
+    },
+    "telegeography_landing": {
+        "id": 10,
+        "name": "Cable Landing Points",
+        "module": "L2",
+        "priority": "P2",
+        "frequency_minutes": 10080,
+    },
+    "telegeography_systems": {
+        "id": 11,
+        "name": "Cable Systems",
+        "module": "L2",
+        "priority": "P2",
+        "frequency_minutes": 10080,
+    },
+    "arcgis_cables": {
+        "id": 15,
+        "name": "ArcGIS Submarine Cables",
+        "module": "L2",
+        "priority": "P1",
+        "frequency_minutes": 10080,
+    },
+    "arcgis_landing_points": {
+        "id": 16,
+        "name": "ArcGIS Landing Points",
+        "module": "L2",
+        "priority": "P1",
+        "frequency_minutes": 10080,
+    },
+    "arcgis_cable_landing_relation": {
+        "id": 17,
+        "name": "ArcGIS Cable-Landing Relations",
+        "module": "L2",
+        "priority": "P1",
+        "frequency_minutes": 10080,
+    },
+    "fao_landing_points": {
+        "id": 18,
+        "name": "FAO Landing Points",
+        "module": "L2",
+        "priority": "P1",
+        "frequency_minutes": 10080,
+    },
+    "spacetrack_tle": {
+        "id": 19,
+        "name": "Space-Track TLE",
+        "module": "L3",
+        "priority": "P2",
+        "frequency_minutes": 1440,
+    },
+    "celestrak_tle": {
+        "id": 20,
+        "name": "CelesTrak TLE",
+        "module": "L3",
+        "priority": "P2",
+        "frequency_minutes": 1440,
+    },
+}
+
+ID_TO_COLLECTOR = {info["id"]: name for name, info in DEFAULT_DATASOURCES.items()}
+COLLECTOR_TO_ID = {name: info["id"] for name, info in DEFAULT_DATASOURCES.items()}
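A sanity check for the derived lookup tables; ids must stay unique or ID_TO_COLLECTOR silently drops entries.

from app.core.datasource_defaults import (
    COLLECTOR_TO_ID,
    DEFAULT_DATASOURCES,
    ID_TO_COLLECTOR,
)

assert ID_TO_COLLECTOR[COLLECTOR_TO_ID["top500"]] == "top500"
assert len(ID_TO_COLLECTOR) == len(DEFAULT_DATASOURCES)  # 17 entries, ids 1-11 and 15-20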
@@ -25,11 +25,52 @@ async def get_db() -> AsyncGenerator[AsyncSession, None]:
         raise
 
 
+async def seed_default_datasources(session: AsyncSession):
+    from app.core.datasource_defaults import DEFAULT_DATASOURCES
+    from app.models.datasource import DataSource
+
+    for source, info in DEFAULT_DATASOURCES.items():
+        existing = await session.get(DataSource, info["id"])
+        if existing:
+            existing.name = info["name"]
+            existing.source = source
+            existing.module = info["module"]
+            existing.priority = info["priority"]
+            existing.frequency_minutes = info["frequency_minutes"]
+            existing.collector_class = source
+            if existing.config is None:
+                existing.config = "{}"
+            continue
+
+        session.add(
+            DataSource(
+                id=info["id"],
+                name=info["name"],
+                source=source,
+                module=info["module"],
+                priority=info["priority"],
+                frequency_minutes=info["frequency_minutes"],
+                collector_class=source,
+                config="{}",
+                is_active=True,
+            )
+        )
+
+    await session.commit()
+
+
 async def init_db():
     import app.models.user  # noqa: F401
     import app.models.gpu_cluster  # noqa: F401
     import app.models.task  # noqa: F401
     import app.models.datasource  # noqa: F401
     import app.models.datasource_config  # noqa: F401
     import app.models.alert  # noqa: F401
     import app.models.collected_data  # noqa: F401
+    import app.models.system_setting  # noqa: F401
 
     async with engine.begin() as conn:
         await conn.run_sync(Base.metadata.create_all)
+
+    async with async_session_factory() as session:
+        await seed_default_datasources(session)
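The seed is written to be idempotent: rows are looked up by primary key and updated in place, so re-running startup never duplicates datasources. A minimal check, assuming a configured test database:

import asyncio

from sqlalchemy import func, select

from app.db.session import async_session_factory, seed_default_datasources
from app.models.datasource import DataSource

async def check_seed_idempotent() -> None:
    async with async_session_factory() as session:
        await seed_default_datasources(session)
        await seed_default_datasources(session)  # second run must only update
        count = (await session.execute(select(func.count(DataSource.id)))).scalar()
        assert count == 17  # one row per DEFAULT_DATASOURCES entry

asyncio.run(check_seed_idempotent())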
@@ -2,15 +2,14 @@ from contextlib import asynccontextmanager
 
 from fastapi import FastAPI
 from fastapi.middleware.cors import CORSMiddleware
 from fastapi.staticfiles import StaticFiles
 from starlette.middleware.base import BaseHTTPMiddleware
 
-from app.core.config import settings
-from app.core.websocket.broadcaster import broadcaster
-from app.db.session import init_db, async_session_factory
 from app.api.main import api_router
 from app.api.v1 import websocket
-from app.services.scheduler import start_scheduler, stop_scheduler
+from app.core.config import settings
+from app.core.websocket.broadcaster import broadcaster
+from app.db.session import init_db
+from app.services.scheduler import start_scheduler, stop_scheduler, sync_scheduler_with_datasources
 
 
 class WebSocketCORSMiddleware(BaseHTTPMiddleware):
@@ -28,6 +27,7 @@ class WebSocketCORSMiddleware(BaseHTTPMiddleware):
 async def lifespan(app: FastAPI):
     await init_db()
     start_scheduler()
+    await sync_scheduler_with_datasources()
     broadcaster.start()
     yield
     broadcaster.stop()
@@ -60,16 +60,11 @@ app.include_router(websocket.router)
 
 @app.get("/health")
 async def health_check():
     """Health check endpoint"""
-    return {
-        "status": "healthy",
-        "version": settings.VERSION,
-    }
+    return {"status": "healthy", "version": settings.VERSION}
 
 
 @app.get("/")
 async def root():
     """API root"""
     return {
         "name": settings.PROJECT_NAME,
         "version": settings.VERSION,
@@ -80,7 +75,6 @@ async def root():
 
 @app.get("/api/v1/scheduler/jobs")
 async def get_scheduler_jobs():
-    """Get the scheduler job list"""
     from app.services.scheduler import get_scheduler_jobs
 
     return {"jobs": get_scheduler_jobs()}
@@ -2,14 +2,18 @@ from app.models.user import User
 from app.models.gpu_cluster import GPUCluster
 from app.models.task import CollectionTask
 from app.models.datasource import DataSource
+from app.models.datasource_config import DataSourceConfig
 from app.models.alert import Alert, AlertSeverity, AlertStatus
+from app.models.system_setting import SystemSetting
 
 __all__ = [
     "User",
     "GPUCluster",
     "CollectionTask",
     "DataSource",
+    "DataSourceConfig",
+    "SystemSetting",
     "Alert",
     "AlertSeverity",
     "AlertStatus",
 ]
backend/app/models/system_setting.py (new file, 19 lines)
@@ -0,0 +1,19 @@
+"""Persistent system settings model."""
+
+from sqlalchemy import JSON, Column, DateTime, Integer, String, UniqueConstraint
+from sqlalchemy.sql import func
+
+from app.db.session import Base
+
+
+class SystemSetting(Base):
+    __tablename__ = "system_settings"
+    __table_args__ = (UniqueConstraint("category", name="uq_system_settings_category"),)
+
+    id = Column(Integer, primary_key=True, autoincrement=True)
+    category = Column(String(50), nullable=False)
+    payload = Column(JSON, nullable=False, default={})
+    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
+
+    def __repr__(self):
+        return f"<SystemSetting {self.category}>"
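One caveat worth noting: payload is a plain JSON column, so SQLAlchemy does not change-track in-place mutation (record.payload["key"] = value would be lost on commit). save_setting_payload above already sidesteps this by rebinding the attribute; any future writer should do the same.

# safe: assigning a new dict marks the row dirty
record.payload = {**record.payload, "refresh_interval": 120}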
@@ -1,15 +1,16 @@
-"""Task Scheduler for running collection jobs"""
+"""Task Scheduler for running collection jobs."""
 
 import asyncio
 import logging
-from typing import Dict, Any
+from datetime import datetime
+from typing import Any, Dict
 
 from apscheduler.schedulers.asyncio import AsyncIOScheduler
 from apscheduler.triggers.interval import IntervalTrigger
-from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy import select
 
 from app.db.session import async_session_factory
+from app.models.datasource import DataSource
 from app.services.collectors.registry import collector_registry
 
 logger = logging.getLogger(__name__)
@@ -17,77 +18,119 @@ logger = logging.getLogger(__name__)
 scheduler = AsyncIOScheduler()
 
 
-COLLECTOR_TO_ID = {
-    "top500": 1,
-    "epoch_ai_gpu": 2,
-    "huggingface_models": 3,
-    "huggingface_datasets": 4,
-    "huggingface_spaces": 5,
-    "peeringdb_ixp": 6,
-    "peeringdb_network": 7,
-    "peeringdb_facility": 8,
-    "telegeography_cables": 9,
-    "telegeography_landing": 10,
-    "telegeography_systems": 11,
-    "arcgis_cables": 15,
-    "arcgis_landing_points": 16,
-    "arcgis_cable_landing_relation": 17,
-    "fao_landing_points": 18,
-    "spacetrack_tle": 19,
-    "celestrak_tle": 20,
-}
+async def _update_next_run_at(datasource: DataSource, session) -> None:
+    job = scheduler.get_job(datasource.source)
+    datasource.next_run_at = job.next_run_time if job else None
+    await session.commit()
+
+
+async def _apply_datasource_schedule(datasource: DataSource, session) -> None:
+    collector = collector_registry.get(datasource.source)
+    if not collector:
+        logger.warning("Collector not found for datasource %s", datasource.source)
+        return
+
+    collector_registry.set_active(datasource.source, datasource.is_active)
+
+    existing_job = scheduler.get_job(datasource.source)
+    if existing_job:
+        scheduler.remove_job(datasource.source)
+
+    if datasource.is_active:
+        scheduler.add_job(
+            run_collector_task,
+            trigger=IntervalTrigger(minutes=max(1, datasource.frequency_minutes)),
+            id=datasource.source,
+            name=datasource.name,
+            replace_existing=True,
+            kwargs={"collector_name": datasource.source},
+        )
+        logger.info(
+            "Scheduled collector: %s (every %sm)",
+            datasource.source,
+            datasource.frequency_minutes,
+        )
+    else:
+        logger.info("Collector disabled: %s", datasource.source)
+
+    await _update_next_run_at(datasource, session)
 
 
 async def run_collector_task(collector_name: str):
-    """Run a single collector task"""
+    """Run a single collector task."""
     collector = collector_registry.get(collector_name)
     if not collector:
-        logger.error(f"Collector not found: {collector_name}")
+        logger.error("Collector not found: %s", collector_name)
         return
 
-    # Get the correct datasource_id
-    datasource_id = COLLECTOR_TO_ID.get(collector_name, 1)
-
     async with async_session_factory() as db:
+        result = await db.execute(select(DataSource).where(DataSource.source == collector_name))
+        datasource = result.scalar_one_or_none()
+        if not datasource:
+            logger.error("Datasource not found for collector: %s", collector_name)
+            return
+
+        if not datasource.is_active:
+            logger.info("Skipping disabled collector: %s", collector_name)
+            return
+
         try:
-            # Set the datasource_id on the collector instance
-            collector._datasource_id = datasource_id
-
-            logger.info(f"Running collector: {collector_name} (datasource_id={datasource_id})")
-            result = await collector.run(db)
-            logger.info(f"Collector {collector_name} completed: {result}")
-        except Exception as e:
-            logger.error(f"Collector {collector_name} failed: {e}")
+            collector._datasource_id = datasource.id
+            logger.info("Running collector: %s (datasource_id=%s)", collector_name, datasource.id)
+            task_result = await collector.run(db)
+            datasource.last_run_at = datetime.utcnow()
+            datasource.last_status = task_result.get("status")
+            await _update_next_run_at(datasource, db)
+            logger.info("Collector %s completed: %s", collector_name, task_result)
+        except Exception as exc:
+            datasource.last_run_at = datetime.utcnow()
+            datasource.last_status = "failed"
+            await db.commit()
+            logger.exception("Collector %s failed: %s", collector_name, exc)
 
 
-def start_scheduler():
-    """Start the scheduler with all registered collectors"""
-    collectors = collector_registry.all()
-
-    for name, collector in collectors.items():
-        if collector_registry.is_active(name):
-            scheduler.add_job(
-                run_collector_task,
-                trigger=IntervalTrigger(hours=collector.frequency_hours),
-                id=name,
-                name=name,
-                replace_existing=True,
-                kwargs={"collector_name": name},
-            )
-            logger.info(f"Scheduled collector: {name} (every {collector.frequency_hours}h)")
-
-    scheduler.start()
-    logger.info("Scheduler started")
+def start_scheduler() -> None:
+    """Start the scheduler."""
+    if not scheduler.running:
+        scheduler.start()
+        logger.info("Scheduler started")
 
 
-def stop_scheduler():
-    """Stop the scheduler"""
-    scheduler.shutdown()
-    logger.info("Scheduler stopped")
+def stop_scheduler() -> None:
+    """Stop the scheduler."""
+    if scheduler.running:
+        scheduler.shutdown(wait=False)
+        logger.info("Scheduler stopped")
+
+
+async def sync_scheduler_with_datasources() -> None:
+    """Synchronize scheduler jobs with datasource table."""
+    async with async_session_factory() as db:
+        result = await db.execute(select(DataSource).order_by(DataSource.id))
+        datasources = result.scalars().all()
+
+        configured_sources = {datasource.source for datasource in datasources}
+        for job in list(scheduler.get_jobs()):
+            if job.id not in configured_sources:
+                scheduler.remove_job(job.id)
+
+        for datasource in datasources:
+            await _apply_datasource_schedule(datasource, db)
+
+
+async def sync_datasource_job(datasource_id: int) -> bool:
+    """Synchronize a single datasource job after settings changes."""
+    async with async_session_factory() as db:
+        datasource = await db.get(DataSource, datasource_id)
+        if not datasource:
+            return False
+
+        await _apply_datasource_schedule(datasource, db)
+        return True
 
 
 def get_scheduler_jobs() -> list[Dict[str, Any]]:
-    """Get all scheduled jobs"""
+    """Get all scheduled jobs."""
     jobs = []
     for job in scheduler.get_jobs():
         jobs.append(
@@ -101,52 +144,17 @@ def get_scheduler_jobs() -> list[Dict[str, Any]]:
     return jobs
 
 
-def add_job(collector_name: str, hours: int = 4):
-    """Add a new scheduled job"""
-    collector = collector_registry.get(collector_name)
-    if not collector:
-        raise ValueError(f"Collector not found: {collector_name}")
-
-    scheduler.add_job(
-        run_collector_task,
-        trigger=IntervalTrigger(hours=hours),
-        id=collector_name,
-        name=collector_name,
-        replace_existing=True,
-        kwargs={"collector_name": collector_name},
-    )
-    logger.info(f"Added scheduled job: {collector_name} (every {hours}h)")
-
-
-def remove_job(collector_name: str):
-    """Remove a scheduled job"""
-    scheduler.remove_job(collector_name)
-    logger.info(f"Removed scheduled job: {collector_name}")
-
-
-def pause_job(collector_name: str):
-    """Pause a scheduled job"""
-    scheduler.pause_job(collector_name)
-    logger.info(f"Paused job: {collector_name}")
-
-
-def resume_job(collector_name: str):
-    """Resume a scheduled job"""
-    scheduler.resume_job(collector_name)
-    logger.info(f"Resumed job: {collector_name}")
-
-
 def run_collector_now(collector_name: str) -> bool:
-    """Run a collector immediately (not scheduled)"""
+    """Run a collector immediately (not scheduled)."""
     collector = collector_registry.get(collector_name)
     if not collector:
-        logger.error(f"Collector not found: {collector_name}")
+        logger.error("Collector not found: %s", collector_name)
         return False
 
     try:
         asyncio.create_task(run_collector_task(collector_name))
-        logger.info(f"Triggered collector: {collector_name}")
+        logger.info("Triggered collector: %s", collector_name)
         return True
-    except Exception as e:
-        logger.error(f"Failed to trigger collector {collector_name}: {e}")
-        return False
+    except Exception as exc:
+        logger.error("Failed to trigger collector %s: %s", collector_name, exc)
+        return False
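Taken together, the reschedule path is: edit the datasource row (via the API or a shell), then call sync_datasource_job, which re-reads the row and re-registers its interval job. A minimal sketch, assuming a running event loop and a seeded scheduler:

from app.services.scheduler import scheduler, sync_datasource_job

async def reschedule(datasource_id: int) -> None:
    # after PUT /settings/collectors/{id} the API performs this call itself
    if await sync_datasource_job(datasource_id):
        for job in scheduler.get_jobs():
            print(job.id, job.next_run_time)  # next_run_at mirrors this value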