first commit
This commit is contained in:
BIN
backend/app/api/v1/__pycache__/alerts.cpython-311.pyc
Normal file
BIN
backend/app/api/v1/__pycache__/alerts.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/app/api/v1/__pycache__/auth.cpython-311.pyc
Normal file
BIN
backend/app/api/v1/__pycache__/auth.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/app/api/v1/__pycache__/dashboard.cpython-311.pyc
Normal file
BIN
backend/app/api/v1/__pycache__/dashboard.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/app/api/v1/__pycache__/datasource_config.cpython-311.pyc
Normal file
BIN
backend/app/api/v1/__pycache__/datasource_config.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/app/api/v1/__pycache__/datasources.cpython-311.pyc
Normal file
BIN
backend/app/api/v1/__pycache__/datasources.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/app/api/v1/__pycache__/settings.cpython-311.pyc
Normal file
BIN
backend/app/api/v1/__pycache__/settings.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/app/api/v1/__pycache__/tasks.cpython-311.pyc
Normal file
BIN
backend/app/api/v1/__pycache__/tasks.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/app/api/v1/__pycache__/users.cpython-311.pyc
Normal file
BIN
backend/app/api/v1/__pycache__/users.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/app/api/v1/__pycache__/websocket.cpython-311.pyc
Normal file
BIN
backend/app/api/v1/__pycache__/websocket.cpython-311.pyc
Normal file
Binary file not shown.
124
backend/app/api/v1/alerts.py
Normal file
124
backend/app/api/v1/alerts.py
Normal file
@@ -0,0 +1,124 @@
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
from sqlalchemy import select, func, case
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.db.session import get_db
|
||||
from app.models.user import User
|
||||
from app.core.security import get_current_user
|
||||
from app.models.alert import Alert, AlertSeverity, AlertStatus
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get("")
async def list_alerts(
    severity: Optional[str] = None,
    status: Optional[str] = None,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """List alerts, optionally filtered by severity and status.

    Results are ordered CRITICAL -> WARNING -> INFO, newest first within a
    severity. Returns {"total": <matching count>, "data": [<alert dicts>]}.

    Fix: the filter params were annotated `str = None`; they are optional,
    so they now use `Optional[str]` (already imported at module top).

    NOTE(review): an unknown severity/status string raises ValueError from
    the enum constructor (-> HTTP 500); consider validating to return 422.
    """
    query = select(Alert)

    if severity:
        query = query.where(Alert.severity == AlertSeverity(severity))
    if status:
        query = query.where(Alert.status == AlertStatus(status))

    # Severity rank first (critical on top), then most recent first.
    query = query.order_by(
        case(
            (Alert.severity == AlertSeverity.CRITICAL, 1),
            (Alert.severity == AlertSeverity.WARNING, 2),
            (Alert.severity == AlertSeverity.INFO, 3),
        ),
        Alert.created_at.desc(),
    )

    result = await db.execute(query)
    alerts = result.scalars().all()

    # Count query repeats the same filters so "total" matches the data set.
    total_query = select(func.count(Alert.id))
    if severity:
        total_query = total_query.where(Alert.severity == AlertSeverity(severity))
    if status:
        total_query = total_query.where(Alert.status == AlertStatus(status))
    total_result = await db.execute(total_query)
    total = total_result.scalar()

    return {
        "total": total,
        "data": [alert.to_dict() for alert in alerts],
    }
|
||||
|
||||
|
||||
@router.post("/{alert_id}/acknowledge")
async def acknowledge_alert(
    alert_id: int,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Mark one alert as acknowledged by the current user.

    NOTE(review): a missing alert is reported via a 200 response carrying an
    "error" key rather than a 404 — confirm clients rely on this shape.
    """
    lookup = await db.execute(select(Alert).where(Alert.id == alert_id))
    alert = lookup.scalar_one_or_none()
    if alert is None:
        return {"error": "Alert not found"}

    # Record who acknowledged and when, then persist.
    alert.status = AlertStatus.ACKNOWLEDGED
    alert.acknowledged_by = current_user.id
    alert.acknowledged_at = datetime.utcnow()
    await db.commit()

    return {"message": "Alert acknowledged", "alert": alert.to_dict()}
|
||||
|
||||
|
||||
@router.post("/{alert_id}/resolve")
async def resolve_alert(
    alert_id: int,
    resolution: str,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Mark an alert as resolved, recording who, when, and the resolution note.

    NOTE(review): a missing alert yields a 200 with an "error" key rather
    than a 404 — matches acknowledge_alert; confirm before changing either.
    """
    lookup = await db.execute(select(Alert).where(Alert.id == alert_id))
    alert = lookup.scalar_one_or_none()
    if alert is None:
        return {"error": "Alert not found"}

    # Transition to RESOLVED with full audit trail, then persist.
    alert.status = AlertStatus.RESOLVED
    alert.resolved_by = current_user.id
    alert.resolved_at = datetime.utcnow()
    alert.resolution_notes = resolution
    await db.commit()

    return {"message": "Alert resolved", "alert": alert.to_dict()}
|
||||
|
||||
|
||||
@router.get("/stats")
async def get_alert_stats(
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Return counts of ACTIVE alerts broken down by severity.

    Fix: the original issued one COUNT query per severity (three DB
    round-trips); a single GROUP BY query returns the same numbers.
    Severities with no active alerts still report 0.
    """
    result = await db.execute(
        select(Alert.severity, func.count(Alert.id))
        .where(Alert.status == AlertStatus.ACTIVE)
        .group_by(Alert.severity)
    )
    # Rows are (severity, count) pairs; absent severities default to 0.
    counts = dict(result.all())

    return {
        "critical": counts.get(AlertSeverity.CRITICAL, 0),
        "warning": counts.get(AlertSeverity.WARNING, 0),
        "info": counts.get(AlertSeverity.INFO, 0),
    }
|
||||
108
backend/app/api/v1/auth.py
Normal file
108
backend/app/api/v1/auth.py
Normal file
@@ -0,0 +1,108 @@
|
||||
from datetime import timedelta
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from fastapi.security import OAuth2PasswordRequestForm
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import text
|
||||
|
||||
from app.core.config import settings
|
||||
from app.core.security import (
|
||||
create_access_token,
|
||||
create_refresh_token,
|
||||
blacklist_token,
|
||||
get_current_user,
|
||||
verify_password,
|
||||
)
|
||||
from app.db.session import get_db
|
||||
from app.models.user import User
|
||||
from app.schemas.token import Token
|
||||
from app.schemas.user import UserCreate, UserResponse
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.post("/login", response_model=Token)
async def login(
    form_data: OAuth2PasswordRequestForm = Depends(),
    db: AsyncSession = Depends(get_db),
):
    """Authenticate with username/password; issue JWT access + refresh tokens.

    Fix: the refresh token was generated and then silently discarded — it is
    now included in the response. (response_model=Token filters unknown keys,
    so confirm the Token schema declares a refresh_token field.)

    NOTE(review): a missing user fails fast without a password-hash
    comparison, which leaks username existence via timing — confirm this is
    acceptable. `sub` is set to an int user id; many JWT libs expect a
    string subject — verify against create_access_token.
    """
    result = await db.execute(
        text(
            "SELECT id, username, email, password_hash, role, is_active FROM users WHERE username = :username"
        ),
        {"username": form_data.username},
    )
    row = result.fetchone()
    if row is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid credentials",
        )

    # Hydrate a transient User from the raw row; attribute order mirrors the
    # SELECT column list above.
    user = User()
    (
        user.id,
        user.username,
        user.email,
        user.password_hash,
        user.role,
        user.is_active,
    ) = row

    if not verify_password(form_data.password, user.password_hash):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid credentials",
        )
    if not user.is_active:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="User is inactive",
        )

    access_token = create_access_token(data={"sub": user.id})
    refresh = create_refresh_token(data={"sub": user.id})

    return {
        "access_token": access_token,
        "refresh_token": refresh,  # previously computed but never returned
        "token_type": "bearer",
        "expires_in": settings.ACCESS_TOKEN_EXPIRE_MINUTES * 60,
        "user": {
            "id": user.id,
            "username": user.username,
            "role": user.role,
        },
    }
|
||||
|
||||
|
||||
@router.post("/refresh", response_model=Token)
async def refresh_token(
    current_user: User = Depends(get_current_user),
):
    """Issue a fresh access token for an already-authenticated user.

    NOTE(review): despite the name, this authenticates via the regular
    access-token dependency rather than consuming a refresh token.
    """
    new_token = create_access_token(data={"sub": current_user.id})

    return {
        "access_token": new_token,
        "token_type": "bearer",
        "expires_in": settings.ACCESS_TOKEN_EXPIRE_MINUTES * 60,
        "user": {
            "id": current_user.id,
            "username": current_user.username,
            "role": current_user.role,
        },
    }
|
||||
|
||||
|
||||
@router.post("/logout")
async def logout():
    """Acknowledge logout without any server-side state change.

    NOTE(review): `blacklist_token` is imported at module level but never
    called, so issued tokens remain valid until expiry — confirm intended.
    """
    return {"message": "Successfully logged out"}
|
||||
|
||||
|
||||
@router.get("/me", response_model=UserResponse)
async def get_me(current_user: User = Depends(get_current_user)):
    """Return the authenticated user's own profile."""
    u = current_user
    return {
        "id": u.id,
        "username": u.username,
        "email": u.email,
        "role": u.role,
        "is_active": u.is_active,
        "created_at": u.created_at,
    }
|
||||
431
backend/app/api/v1/collected_data.py
Normal file
431
backend/app/api/v1/collected_data.py
Normal file
@@ -0,0 +1,431 @@
|
||||
from typing import Optional
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status, Response
|
||||
from fastapi.responses import StreamingResponse
|
||||
from sqlalchemy import select, func, text
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
import json
|
||||
import csv
|
||||
import io
|
||||
|
||||
from app.db.session import get_db
|
||||
from app.models.user import User
|
||||
from app.core.security import get_current_user
|
||||
from app.models.collected_data import CollectedData
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
def _row_to_dict(row) -> dict:
    """Map one collected_data SELECT row (fixed column order: id..is_valid)
    to the API dict shape; the two datetime columns are ISO-formatted."""
    return {
        "id": row[0],
        "source": row[1],
        "source_id": row[2],
        "data_type": row[3],
        "name": row[4],
        "title": row[5],
        "description": row[6],
        "country": row[7],
        "city": row[8],
        "latitude": row[9],
        "longitude": row[10],
        "value": row[11],
        "unit": row[12],
        "metadata": row[13],
        "collected_at": row[14].isoformat() if row[14] else None,
        "reference_date": row[15].isoformat() if row[15] else None,
        "is_valid": row[16],
    }


@router.get("")
async def list_collected_data(
    source: Optional[str] = Query(None, description="数据源过滤"),
    data_type: Optional[str] = Query(None, description="数据类型过滤"),
    country: Optional[str] = Query(None, description="国家过滤"),
    search: Optional[str] = Query(None, description="搜索名称"),
    page: int = Query(1, ge=1, description="页码"),
    page_size: int = Query(20, ge=1, le=100, description="每页数量"),
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Paginated listing of collected data, newest first.

    Fix: the filter-building logic duplicated `build_where_clause` (defined
    in this module) inline — it now calls the shared helper. Only fixed
    column names and bound parameters enter the SQL, so the f-string
    interpolation of the WHERE fragment is safe.
    """
    where_sql, params = build_where_clause(source, data_type, country, search)

    offset = (page - 1) * page_size

    # Total matching rows (same filters as the data query).
    count_query = text(f"SELECT COUNT(*) FROM collected_data WHERE {where_sql}")
    count_result = await db.execute(count_query, params)
    total = count_result.scalar()

    query = text(f"""
        SELECT id, source, source_id, data_type, name, title, description,
               country, city, latitude, longitude, value, unit,
               metadata, collected_at, reference_date, is_valid
        FROM collected_data
        WHERE {where_sql}
        ORDER BY collected_at DESC
        LIMIT :limit OFFSET :offset
    """)
    params["limit"] = page_size
    params["offset"] = offset

    result = await db.execute(query, params)
    rows = result.fetchall()

    return {
        "total": total,
        "page": page,
        "page_size": page_size,
        "data": [_row_to_dict(row) for row in rows],
    }
|
||||
|
||||
|
||||
@router.get("/summary")
async def get_data_summary(
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Aggregate record counts per (source, data_type) plus per-source totals."""
    # Counts broken down by source and data_type.
    result = await db.execute(
        text("""
            SELECT source, data_type, COUNT(*) as count
            FROM collected_data
            GROUP BY source, data_type
            ORDER BY source, data_type
        """)
    )

    by_source = {}
    total = 0
    for src, dtype, count in result.fetchall():
        by_source.setdefault(src, {})[dtype] = count
        total += count

    # Overall counts per source, largest first.
    source_totals = await db.execute(
        text("""
            SELECT source, COUNT(*) as count
            FROM collected_data
            GROUP BY source
            ORDER BY count DESC
        """)
    )

    return {
        "total_records": total,
        "by_source": by_source,
        "source_totals": [
            {"source": src, "count": cnt} for src, cnt in source_totals.fetchall()
        ],
    }
|
||||
|
||||
|
||||
@router.get("/sources")
async def get_data_sources(
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Return the distinct `source` values present in collected_data."""
    result = await db.execute(
        text("""
            SELECT DISTINCT source FROM collected_data ORDER BY source
        """)
    )
    return {"sources": [src for (src,) in result.fetchall()]}
|
||||
|
||||
|
||||
@router.get("/types")
async def get_data_types(
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Return the distinct `data_type` values present in collected_data."""
    result = await db.execute(
        text("""
            SELECT DISTINCT data_type FROM collected_data ORDER BY data_type
        """)
    )
    return {"data_types": [dtype for (dtype,) in result.fetchall()]}
|
||||
|
||||
|
||||
@router.get("/countries")
async def get_countries(
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Return the distinct non-empty `country` values in collected_data."""
    result = await db.execute(
        text("""
            SELECT DISTINCT country FROM collected_data
            WHERE country IS NOT NULL AND country != ''
            ORDER BY country
        """)
    )
    return {"countries": [country for (country,) in result.fetchall()]}
|
||||
|
||||
|
||||
@router.get("/{data_id}")
async def get_collected_data(
    data_id: int,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Fetch one collected_data row by primary key; 404 if absent."""
    result = await db.execute(
        text("""
            SELECT id, source, source_id, data_type, name, title, description,
                   country, city, latitude, longitude, value, unit,
                   metadata, collected_at, reference_date, is_valid
            FROM collected_data
            WHERE id = :id
        """),
        {"id": data_id},
    )
    row = result.fetchone()

    if row is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="数据不存在",
        )

    # First 14 columns map straight through, in SELECT order; the two
    # datetime columns are ISO-formatted for JSON, then the flag.
    payload = dict(
        zip(
            (
                "id", "source", "source_id", "data_type", "name", "title",
                "description", "country", "city", "latitude", "longitude",
                "value", "unit", "metadata",
            ),
            row[:14],
        )
    )
    payload["collected_at"] = row[14].isoformat() if row[14] else None
    payload["reference_date"] = row[15].isoformat() if row[15] else None
    payload["is_valid"] = row[16]
    return payload
|
||||
|
||||
|
||||
def build_where_clause(
    source: Optional[str], data_type: Optional[str], country: Optional[str], search: Optional[str]
):
    """Translate the optional filter arguments into a SQL fragment plus bind params.

    Returns ("cond AND cond ...", {param: value}); with no filters active the
    fragment is the always-true "1=1" so callers can interpolate it blindly.
    """
    clauses = []
    params = {}

    # The three exact-match filters share one shape: column = :column.
    for column, value in (("source", source), ("data_type", data_type), ("country", country)):
        if value:
            clauses.append(f"{column} = :{column}")
            params[column] = value

    # Free-text search matches name OR title, case-insensitively (ILIKE).
    if search:
        clauses.append("(name ILIKE :search OR title ILIKE :search)")
        params["search"] = f"%{search}%"

    return (" AND ".join(clauses) if clauses else "1=1"), params
|
||||
|
||||
|
||||
@router.get("/export/json")
async def export_json(
    source: Optional[str] = Query(None, description="数据源过滤"),
    data_type: Optional[str] = Query(None, description="数据类型过滤"),
    country: Optional[str] = Query(None, description="国家过滤"),
    search: Optional[str] = Query(None, description="搜索名称"),
    limit: int = Query(10000, ge=1, le=50000, description="最大导出数量"),
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Export matching collected_data rows as a downloadable JSON document."""
    where_sql, params = build_where_clause(source, data_type, country, search)
    params["limit"] = limit

    query = text(f"""
        SELECT id, source, source_id, data_type, name, title, description,
               country, city, latitude, longitude, value, unit,
               metadata, collected_at, reference_date, is_valid
        FROM collected_data
        WHERE {where_sql}
        ORDER BY collected_at DESC
        LIMIT :limit
    """)

    rows = (await db.execute(query, params)).fetchall()

    def serialize(row):
        # SELECT column order; datetimes become ISO strings for JSON.
        item = dict(
            zip(
                ("id", "source", "source_id", "data_type", "name", "title",
                 "description", "country", "city", "latitude", "longitude",
                 "value", "unit", "metadata"),
                row[:14],
            )
        )
        item["collected_at"] = row[14].isoformat() if row[14] else None
        item["reference_date"] = row[15].isoformat() if row[15] else None
        item["is_valid"] = row[16]
        return item

    data = [serialize(row) for row in rows]
    json_str = json.dumps({"data": data, "total": len(data)}, ensure_ascii=False, indent=2)

    return StreamingResponse(
        io.StringIO(json_str),
        media_type="application/json",
        headers={
            "Content-Disposition": f"attachment; filename=collected_data_{source or 'all'}.json"
        },
    )
|
||||
|
||||
|
||||
@router.get("/export/csv")
async def export_csv(
    source: Optional[str] = Query(None, description="数据源过滤"),
    data_type: Optional[str] = Query(None, description="数据类型过滤"),
    country: Optional[str] = Query(None, description="国家过滤"),
    search: Optional[str] = Query(None, description="搜索名称"),
    limit: int = Query(10000, ge=1, le=50000, description="最大导出数量"),
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Export matching collected_data rows as a downloadable CSV file.

    Fix: the original wrapped the finished buffer in a second StringIO via
    `io.StringIO(output.getvalue())`, copying the entire export; the buffer
    is now rewound and streamed directly.
    """
    where_sql, params = build_where_clause(source, data_type, country, search)
    params["limit"] = limit

    query = text(f"""
        SELECT id, source, source_id, data_type, name, title, description,
               country, city, latitude, longitude, value, unit,
               metadata, collected_at, reference_date, is_valid
        FROM collected_data
        WHERE {where_sql}
        ORDER BY collected_at DESC
        LIMIT :limit
    """)

    result = await db.execute(query, params)
    rows = result.fetchall()

    output = io.StringIO()
    writer = csv.writer(output)

    # Header row matches the SELECT column order.
    writer.writerow(
        [
            "ID",
            "Source",
            "Source ID",
            "Type",
            "Name",
            "Title",
            "Description",
            "Country",
            "City",
            "Latitude",
            "Longitude",
            "Value",
            "Unit",
            "Metadata",
            "Collected At",
            "Reference Date",
            "Is Valid",
        ]
    )

    for row in rows:
        writer.writerow(
            [
                # First 13 scalar columns pass through unchanged.
                *row[:13],
                # Metadata is serialized as JSON text; datetimes as ISO.
                json.dumps(row[13]) if row[13] else "",
                row[14].isoformat() if row[14] else "",
                row[15].isoformat() if row[15] else "",
                row[16],
            ]
        )

    # Rewind and stream the buffer we already have — no extra copy.
    output.seek(0)
    return StreamingResponse(
        output,
        media_type="text/csv",
        headers={
            "Content-Disposition": f"attachment; filename=collected_data_{source or 'all'}.csv"
        },
    )
|
||||
239
backend/app/api/v1/dashboard.py
Normal file
239
backend/app/api/v1/dashboard.py
Normal file
@@ -0,0 +1,239 @@
|
||||
"""Dashboard API with caching and optimizations"""
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from fastapi import APIRouter, Depends
|
||||
from sqlalchemy import select, func, text
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.db.session import get_db
|
||||
from app.models.user import User
|
||||
from app.models.datasource import DataSource
|
||||
from app.models.datasource_config import DataSourceConfig
|
||||
from app.models.alert import Alert, AlertSeverity
|
||||
from app.models.task import CollectionTask
|
||||
from app.core.security import get_current_user
|
||||
from app.core.cache import cache
|
||||
|
||||
# Built-in collectors info (mirrored from datasources.py).
# Keyed by collector slug; "module" is the data layer (L1/L2), "priority"
# runs P0 (highest) .. P2, "frequency_hours" is the collection interval.
COLLECTOR_INFO = {
    "top500": {"id": 1, "name": "TOP500 Supercomputers", "module": "L1", "priority": "P0", "frequency_hours": 4},
    "epoch_ai_gpu": {"id": 2, "name": "Epoch AI GPU Clusters", "module": "L1", "priority": "P0", "frequency_hours": 6},
    "huggingface_models": {"id": 3, "name": "HuggingFace Models", "module": "L2", "priority": "P1", "frequency_hours": 12},
    "huggingface_datasets": {"id": 4, "name": "HuggingFace Datasets", "module": "L2", "priority": "P1", "frequency_hours": 12},
    "huggingface_spaces": {"id": 5, "name": "HuggingFace Spaces", "module": "L2", "priority": "P2", "frequency_hours": 24},
    "peeringdb_ixp": {"id": 6, "name": "PeeringDB IXP", "module": "L2", "priority": "P1", "frequency_hours": 24},
    "peeringdb_network": {"id": 7, "name": "PeeringDB Networks", "module": "L2", "priority": "P2", "frequency_hours": 48},
    "peeringdb_facility": {"id": 8, "name": "PeeringDB Facilities", "module": "L2", "priority": "P2", "frequency_hours": 48},
    "telegeography_cables": {"id": 9, "name": "Submarine Cables", "module": "L2", "priority": "P1", "frequency_hours": 168},
    "telegeography_landing": {"id": 10, "name": "Cable Landing Points", "module": "L2", "priority": "P2", "frequency_hours": 168},
    "telegeography_systems": {"id": 11, "name": "Cable Systems", "module": "L2", "priority": "P2", "frequency_hours": 168},
}
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get("/stats")
async def get_stats(
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Get dashboard statistics with caching (60-second TTL).

    Combines static built-in collector counts with user-defined configs from
    the database, today's task success rate, and active alert counts.

    Fix: the three per-severity alert COUNT queries are collapsed into one
    GROUP BY query (same numbers, one round-trip).
    """
    cache_key = "dashboard:stats"

    cached_result = cache.get(cache_key)
    if cached_result:
        return cached_result

    today_start = datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0)

    # Built-in collectors are static and counted as always active.
    built_in_count = len(COLLECTOR_INFO)
    built_in_active = built_in_count

    # User-defined configs live in the database.
    result = await db.execute(select(func.count(DataSourceConfig.id)))
    custom_count = result.scalar() or 0

    result = await db.execute(
        select(func.count(DataSourceConfig.id)).where(DataSourceConfig.is_active == True)
    )
    custom_active = result.scalar() or 0

    total_datasources = built_in_count + custom_count
    active_datasources = built_in_active + custom_active

    # Today's collection tasks and their success rate.
    result = await db.execute(
        select(func.count(CollectionTask.id)).where(CollectionTask.started_at >= today_start)
    )
    tasks_today = result.scalar() or 0

    result = await db.execute(
        select(func.count(CollectionTask.id)).where(
            CollectionTask.status == "success",
            CollectionTask.started_at >= today_start,
        )
    )
    success_tasks = result.scalar() or 0
    success_rate = (success_tasks / tasks_today * 100) if tasks_today > 0 else 0

    # Active alert counts per severity in one grouped query.
    # NOTE(review): status is compared to the string "active" here while
    # alerts.py compares against the AlertStatus enum — confirm the column
    # coerces both forms identically.
    result = await db.execute(
        select(Alert.severity, func.count(Alert.id))
        .where(Alert.status == "active")
        .group_by(Alert.severity)
    )
    alert_counts = dict(result.all())

    response = {
        "total_datasources": total_datasources,
        "active_datasources": active_datasources,
        "tasks_today": tasks_today,
        "success_rate": round(success_rate, 1),
        "last_updated": datetime.utcnow().isoformat(),
        "alerts": {
            "critical": alert_counts.get(AlertSeverity.CRITICAL, 0),
            "warning": alert_counts.get(AlertSeverity.WARNING, 0),
            "info": alert_counts.get(AlertSeverity.INFO, 0),
        },
    }

    cache.set(cache_key, response, expire_seconds=60)

    return response
|
||||
|
||||
|
||||
@router.get("/summary")
async def get_summary(
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Get dashboard summary by module with caching (300-second TTL)."""
    cache_key = "dashboard:summary"

    cached_result = cache.get(cache_key)
    if cached_result:
        return cached_result

    # Tally built-in collectors per module (L1/L2/...).
    by_module = {}
    for info in COLLECTOR_INFO.values():
        bucket = by_module.setdefault(info["module"], {"datasources": 0, "sources": []})
        bucket["datasources"] += 1
        bucket["sources"].append(info["name"])

    # Active custom configs are grouped by source_type and all land in the
    # L3 module bucket.
    result = await db.execute(
        select(DataSourceConfig.source_type, func.count(DataSourceConfig.id).label("count"))
        .where(DataSourceConfig.is_active == True)
        .group_by(DataSourceConfig.source_type)
    )
    for row in result.fetchall():
        bucket = by_module.setdefault("L3", {"datasources": 0, "sources": []})
        bucket["datasources"] += row.count
        bucket["sources"].append(f"自定义 ({row.source_type})")

    # NOTE(review): the collected "sources" name lists are never returned —
    # only the per-module datasource count survives into the response.
    summary = {
        module: {
            "datasources": data["datasources"],
            "total_records": 0,  # built-ins don't track this in dashboard stats
            "last_updated": datetime.utcnow().isoformat(),
        }
        for module, data in by_module.items()
    }

    response = {"modules": summary, "last_updated": datetime.utcnow().isoformat()}

    cache.set(cache_key, response, expire_seconds=300)

    return response
|
||||
309
backend/app/api/v1/datasource_config.py
Normal file
309
backend/app/api/v1/datasource_config.py
Normal file
@@ -0,0 +1,309 @@
|
||||
"""DataSourceConfig API for user-defined data sources"""
|
||||
|
||||
from typing import Optional
|
||||
from datetime import datetime
|
||||
import base64
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from sqlalchemy import select, func
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from pydantic import BaseModel, Field
|
||||
import httpx
|
||||
|
||||
from app.db.session import get_db
|
||||
from app.models.user import User
|
||||
from app.models.datasource_config import DataSourceConfig
|
||||
from app.core.security import get_current_user
|
||||
from app.core.cache import cache
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
class DataSourceConfigCreate(BaseModel):
    """Payload for creating a user-defined data source.

    `auth_config` may carry credentials; note that DataSourceConfigResponse
    below does not expose it back to clients.
    """

    name: str = Field(..., min_length=1, max_length=100)
    description: Optional[str] = None
    source_type: str = Field(..., description="http, api, database")
    endpoint: str = Field(..., max_length=500)
    auth_type: str = Field(default="none", description="none, bearer, api_key, basic")
    # default_factory makes the per-instance fresh-dict intent explicit
    # instead of declaring a shared mutable literal as the default
    # (pydantic copies defaults, so runtime behavior is unchanged).
    auth_config: dict = Field(default_factory=dict)
    headers: dict = Field(default_factory=dict)
    config: dict = Field(default_factory=lambda: {"timeout": 30, "retry": 3})
|
||||
|
||||
|
||||
class DataSourceConfigUpdate(BaseModel):
    """Partial-update payload for a data source config.

    Every field is optional; a field left as None means "leave unchanged".
    """

    name: Optional[str] = Field(None, min_length=1, max_length=100)
    description: Optional[str] = None
    source_type: Optional[str] = None
    endpoint: Optional[str] = Field(None, max_length=500)
    auth_type: Optional[str] = None
    auth_config: Optional[dict] = None
    headers: Optional[dict] = None
    config: Optional[dict] = None
    is_active: Optional[bool] = None
|
||||
|
||||
|
||||
class DataSourceConfigResponse(BaseModel):
    """Client-facing view of a stored data source config.

    Mirrors the create schema minus `auth_config`, so stored credentials
    are not echoed back in responses.
    """

    id: int
    name: str
    description: Optional[str]
    source_type: str
    endpoint: str
    auth_type: str
    headers: dict
    config: dict
    is_active: bool
    created_at: datetime
    updated_at: datetime

    class Config:
        # Allow construction directly from ORM row attributes.
        from_attributes = True
|
||||
|
||||
|
||||
async def test_endpoint(
    endpoint: str,
    auth_type: str,
    auth_config: dict,
    headers: dict,
    config: dict,
) -> dict:
    """Probe an endpoint with the configured auth and report reachability.

    Returns status code, elapsed time, and a short preview of the body.
    Non-2xx responses raise httpx.HTTPStatusError (via raise_for_status);
    timeouts/connection failures raise httpx transport errors — callers
    are expected to handle those.

    Fix: the old preview did `response.json()[:3]` unconditionally for JSON
    responses, which raises TypeError when the body is a JSON object
    (dicts are not sliceable); it only worked for JSON arrays.
    """
    timeout = config.get("timeout", 30)
    test_headers = headers.copy()

    # Inject credentials according to the configured auth scheme.
    if auth_type == "bearer" and auth_config.get("token"):
        test_headers["Authorization"] = f"Bearer {auth_config['token']}"
    elif auth_type == "api_key" and auth_config.get("api_key"):
        key_name = auth_config.get("key_name", "X-API-Key")
        test_headers[key_name] = auth_config["api_key"]
    elif auth_type == "basic":
        username = auth_config.get("username", "")
        password = auth_config.get("password", "")
        credentials = f"{username}:{password}"
        encoded = base64.b64encode(credentials.encode()).decode()
        test_headers["Authorization"] = f"Basic {encoded}"

    async with httpx.AsyncClient(timeout=timeout) as client:
        response = await client.get(endpoint, headers=test_headers)
        response.raise_for_status()

        # Preview: first 3 items of a JSON array, otherwise a 200-char
        # truncation of the (stringified) object or raw text.
        if response.headers.get("content-type", "").startswith("application/json"):
            payload = response.json()
            preview = str(payload[:3]) if isinstance(payload, list) else str(payload)[:200]
        else:
            preview = response.text[:200]

        return {
            "status_code": response.status_code,
            "success": True,
            "response_time_ms": response.elapsed.total_seconds() * 1000,
            "data_preview": preview,
        }
|
||||
|
||||
|
||||
@router.get("/configs")
async def list_configs(
    active_only: bool = False,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Return every user-defined data source configuration, newest first.

    With ``active_only`` set, only rows flagged ``is_active`` are included.
    ``auth_config`` is never serialized so credentials cannot leak.
    """
    stmt = select(DataSourceConfig)
    if active_only:
        stmt = stmt.where(DataSourceConfig.is_active == True)
    stmt = stmt.order_by(DataSourceConfig.created_at.desc())

    rows = (await db.execute(stmt)).scalars().all()

    def serialize(row):
        return {
            "id": row.id,
            "name": row.name,
            "description": row.description,
            "source_type": row.source_type,
            "endpoint": row.endpoint,
            "auth_type": row.auth_type,
            "headers": row.headers,
            "config": row.config,
            "is_active": row.is_active,
            "created_at": row.created_at.isoformat() if row.created_at else None,
            "updated_at": row.updated_at.isoformat() if row.updated_at else None,
        }

    return {"total": len(rows), "data": [serialize(r) for r in rows]}
|
||||
|
||||
|
||||
@router.get("/configs/{config_id}")
async def get_config(
    config_id: int,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Fetch one data source configuration by primary key.

    The stored ``auth_config`` is replaced with an empty dict in the
    response so credentials are write-only.
    """
    lookup = await db.execute(select(DataSourceConfig).where(DataSourceConfig.id == config_id))
    row = lookup.scalar_one_or_none()
    if row is None:
        raise HTTPException(status_code=404, detail="Configuration not found")

    return {
        "id": row.id,
        "name": row.name,
        "description": row.description,
        "source_type": row.source_type,
        "endpoint": row.endpoint,
        "auth_type": row.auth_type,
        "auth_config": {},  # Don't return sensitive data
        "headers": row.headers,
        "config": row.config,
        "is_active": row.is_active,
        "created_at": row.created_at.isoformat() if row.created_at else None,
        "updated_at": row.updated_at.isoformat() if row.updated_at else None,
    }
|
||||
|
||||
|
||||
@router.post("/configs")
async def create_config(
    config_data: DataSourceConfigCreate,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Persist a new data source configuration and invalidate the cache."""
    record = DataSourceConfig(
        name=config_data.name,
        description=config_data.description,
        source_type=config_data.source_type,
        endpoint=config_data.endpoint,
        auth_type=config_data.auth_type,
        auth_config=config_data.auth_config,
        headers=config_data.headers,
        config=config_data.config,
    )
    db.add(record)
    await db.commit()
    await db.refresh(record)

    # Drop cached listings so readers see the new row immediately.
    cache.delete_pattern("datasource_configs:*")

    return {
        "id": record.id,
        "name": record.name,
        "message": "Configuration created successfully",
    }
|
||||
|
||||
|
||||
@router.put("/configs/{config_id}")
async def update_config(
    config_id: int,
    config_data: DataSourceConfigUpdate,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Apply a partial update to an existing configuration."""
    lookup = await db.execute(select(DataSourceConfig).where(DataSourceConfig.id == config_id))
    record = lookup.scalar_one_or_none()
    if record is None:
        raise HTTPException(status_code=404, detail="Configuration not found")

    # Overwrite only the fields the client actually supplied.
    for attr, new_value in config_data.model_dump(exclude_unset=True).items():
        setattr(record, attr, new_value)

    await db.commit()
    await db.refresh(record)

    # Cached listings are now stale.
    cache.delete_pattern("datasource_configs:*")

    return {
        "id": record.id,
        "name": record.name,
        "message": "Configuration updated successfully",
    }
|
||||
|
||||
|
||||
@router.delete("/configs/{config_id}")
async def delete_config(
    config_id: int,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Remove a data source configuration and invalidate cached listings."""
    lookup = await db.execute(select(DataSourceConfig).where(DataSourceConfig.id == config_id))
    record = lookup.scalar_one_or_none()
    if record is None:
        raise HTTPException(status_code=404, detail="Configuration not found")

    await db.delete(record)
    await db.commit()

    cache.delete_pattern("datasource_configs:*")

    return {"message": "Configuration deleted successfully"}
|
||||
|
||||
|
||||
@router.post("/configs/{config_id}/test")
async def test_config(
    config_id: int,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Run a live connectivity test against a stored configuration.

    Never raises for a failed probe: connection and HTTP errors are
    reported in the response body with ``success: False``.
    """
    lookup = await db.execute(select(DataSourceConfig).where(DataSourceConfig.id == config_id))
    record = lookup.scalar_one_or_none()
    if record is None:
        raise HTTPException(status_code=404, detail="Configuration not found")

    try:
        return await test_endpoint(
            endpoint=record.endpoint,
            auth_type=record.auth_type,
            auth_config=record.auth_config or {},
            headers=record.headers or {},
            config=record.config or {},
        )
    except httpx.HTTPStatusError as exc:
        # The server answered, but with an error status.
        return {
            "success": False,
            "error": f"HTTP Error: {exc.response.status_code}",
            "message": str(exc),
        }
    except Exception as exc:
        # DNS failure, refused connection, timeout, TLS problems, ...
        return {
            "success": False,
            "error": "Connection failed",
            "message": str(exc),
        }
|
||||
|
||||
|
||||
@router.post("/configs/test")
async def test_new_config(
    config_data: DataSourceConfigCreate,
    current_user: User = Depends(get_current_user),
):
    """Probe a candidate configuration without persisting anything.

    Mirrors ``test_config`` but takes the payload directly instead of a
    stored row; probe failures are reported, not raised.
    """
    try:
        return await test_endpoint(
            endpoint=config_data.endpoint,
            auth_type=config_data.auth_type,
            auth_config=config_data.auth_config or {},
            headers=config_data.headers or {},
            config=config_data.config or {},
        )
    except httpx.HTTPStatusError as exc:
        return {
            "success": False,
            "error": f"HTTP Error: {exc.response.status_code}",
            "message": str(exc),
        }
    except Exception as exc:
        return {
            "success": False,
            "error": "Connection failed",
            "message": str(exc),
        }
|
||||
258
backend/app/api/v1/datasources.py
Normal file
258
backend/app/api/v1/datasources.py
Normal file
@@ -0,0 +1,258 @@
|
||||
from typing import List, Optional
|
||||
from datetime import datetime
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from sqlalchemy import select, func
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.db.session import get_db
|
||||
from app.models.user import User
|
||||
from app.models.datasource import DataSource
|
||||
from app.core.security import get_current_user
|
||||
from app.services.collectors.registry import collector_registry
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
# Static catalogue of the built-in collectors: stable numeric id, display
# name, module layer (L1/L2), scheduling priority and collection cadence.
COLLECTOR_INFO = {
    "top500": {"id": 1, "name": "TOP500 Supercomputers", "module": "L1", "priority": "P0", "frequency_hours": 4},
    "epoch_ai_gpu": {"id": 2, "name": "Epoch AI GPU Clusters", "module": "L1", "priority": "P0", "frequency_hours": 6},
    "huggingface_models": {"id": 3, "name": "HuggingFace Models", "module": "L2", "priority": "P1", "frequency_hours": 12},
    "huggingface_datasets": {"id": 4, "name": "HuggingFace Datasets", "module": "L2", "priority": "P1", "frequency_hours": 12},
    "huggingface_spaces": {"id": 5, "name": "HuggingFace Spaces", "module": "L2", "priority": "P2", "frequency_hours": 24},
    "peeringdb_ixp": {"id": 6, "name": "PeeringDB IXP", "module": "L2", "priority": "P1", "frequency_hours": 24},
    "peeringdb_network": {"id": 7, "name": "PeeringDB Networks", "module": "L2", "priority": "P2", "frequency_hours": 48},
    "peeringdb_facility": {"id": 8, "name": "PeeringDB Facilities", "module": "L2", "priority": "P2", "frequency_hours": 48},
    "telegeography_cables": {"id": 9, "name": "Submarine Cables", "module": "L2", "priority": "P1", "frequency_hours": 168},
    "telegeography_landing": {"id": 10, "name": "Cable Landing Points", "module": "L2", "priority": "P2", "frequency_hours": 168},
    "telegeography_systems": {"id": 11, "name": "Cable Systems", "module": "L2", "priority": "P2", "frequency_hours": 168},
}

# Bidirectional lookup tables between numeric ids and collector keys.
ID_TO_COLLECTOR = {info["id"]: name for name, info in COLLECTOR_INFO.items()}
COLLECTOR_TO_ID = {name: info["id"] for name, info in COLLECTOR_INFO.items()}


def get_collector_name(source_id: str) -> Optional[str]:
    """Resolve *source_id* to a canonical collector key.

    Accepts either a numeric id as a string ("3") or a collector key
    ("huggingface_models"); returns None when neither matches.
    """
    try:
        numeric_id = int(source_id)
    except ValueError:
        pass
    else:
        if numeric_id in ID_TO_COLLECTOR:
            return ID_TO_COLLECTOR[numeric_id]
    return source_id if source_id in COLLECTOR_INFO else None
||||
|
||||
|
||||
@router.get("")
async def list_datasources(
    module: Optional[str] = None,
    is_active: Optional[bool] = None,
    priority: Optional[str] = None,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """List the registered collectors with their catalogue metadata.

    Optional filters: ``module`` (L1/L2), ``priority`` (P0/P1/P2) and
    ``is_active`` (registry activation state).

    Fixes vs. the previous version:
    * the ``is_active`` query parameter was accepted but never applied to
      the returned collector list — it is now honored;
    * a DataSource SELECT whose results were never used has been removed
      (dead code; its rows were discarded).
    """
    collector_list = []
    for name, info in COLLECTOR_INFO.items():
        collector_list.append(
            {
                "id": info["id"],
                "name": info["name"],
                "module": info["module"],
                "priority": info["priority"],
                "frequency": f"{info['frequency_hours']}h",
                "is_active": collector_registry.is_active(name),
                "collector_class": name,
            }
        )

    if module:
        collector_list = [c for c in collector_list if c["module"] == module]
    if priority:
        collector_list = [c for c in collector_list if c["priority"] == priority]
    if is_active is not None:
        # BUG FIX: previously this filter was silently ignored.
        collector_list = [c for c in collector_list if c["is_active"] == is_active]

    return {
        "total": len(collector_list),
        "data": collector_list,
    }
|
||||
|
||||
|
||||
@router.get("/{source_id}")
async def get_datasource(
    source_id: str,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Describe one collector, addressed by numeric id or collector key."""
    name = get_collector_name(source_id)
    if name is None:
        raise HTTPException(status_code=404, detail="Data source not found")

    meta = COLLECTOR_INFO[name]
    return {
        "id": meta["id"],
        "name": meta["name"],
        "module": meta["module"],
        "priority": meta["priority"],
        "frequency": f"{meta['frequency_hours']}h",
        "collector_class": name,
        "is_active": collector_registry.is_active(name),
    }
|
||||
|
||||
|
||||
@router.post("/{source_id}/enable")
async def enable_datasource(
    source_id: str,
    current_user: User = Depends(get_current_user),
):
    """Mark a collector as active in the registry."""
    name = get_collector_name(source_id)
    if name is None:
        raise HTTPException(status_code=404, detail="Data source not found")
    collector_registry.set_active(name, True)
    return {"status": "enabled", "source_id": source_id}
|
||||
|
||||
|
||||
@router.post("/{source_id}/disable")
async def disable_datasource(
    source_id: str,
    current_user: User = Depends(get_current_user),
):
    """Mark a collector as inactive in the registry."""
    name = get_collector_name(source_id)
    if name is None:
        raise HTTPException(status_code=404, detail="Data source not found")
    collector_registry.set_active(name, False)
    return {"status": "disabled", "source_id": source_id}
|
||||
|
||||
|
||||
@router.get("/{source_id}/stats")
async def get_datasource_stats(
    source_id: str,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Record count for one collector's data.

    NOTE(review): assumes ``DataSource.source`` stores the collector's
    display name — confirm against the model definition.
    """
    name = get_collector_name(source_id)
    if name is None:
        raise HTTPException(status_code=404, detail="Data source not found")

    meta = COLLECTOR_INFO[name]
    count_stmt = select(func.count(DataSource.id)).where(DataSource.source == meta["name"])
    total = (await db.execute(count_stmt)).scalar() or 0

    return {
        "source_id": source_id,
        "collector_name": name,
        "name": meta["name"],
        "total_records": total,
        "last_updated": datetime.utcnow().isoformat(),
    }
|
||||
|
||||
|
||||
@router.post("/{source_id}/trigger")
async def trigger_datasource(
    source_id: str,
    current_user: User = Depends(get_current_user),
):
    """Kick off an immediate collection run for one collector."""
    name = get_collector_name(source_id)
    if name is None:
        raise HTTPException(status_code=404, detail="Data source not found")

    # Imported here, matching the original's function-scope import.
    from app.services.scheduler import run_collector_now

    if not collector_registry.is_active(name):
        raise HTTPException(status_code=400, detail="Data source is disabled")

    if not run_collector_now(name):
        raise HTTPException(
            status_code=500,
            detail=f"Failed to trigger collector '{name}'",
        )

    return {
        "status": "triggered",
        "source_id": source_id,
        "collector_name": name,
        "message": f"Collector '{name}' has been triggered",
    }
|
||||
110
backend/app/api/v1/settings.py
Normal file
110
backend/app/api/v1/settings.py
Normal file
@@ -0,0 +1,110 @@
|
||||
from typing import Optional
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from pydantic import BaseModel, EmailStr
|
||||
|
||||
from app.models.user import User
|
||||
from app.core.security import get_current_user
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
# Factory defaults for the three settings groups served by this router.
default_settings = {
    "system": {
        "system_name": "智能星球",
        "refresh_interval": 60,
        "auto_refresh": True,
        "data_retention_days": 30,
        "max_concurrent_tasks": 5,
    },
    "notifications": {
        "email_enabled": False,
        "email_address": "",
        "critical_alerts": True,
        "warning_alerts": True,
        "daily_summary": False,
    },
    "security": {
        "session_timeout": 60,
        "max_login_attempts": 5,
        "password_policy": "medium",
    },
}

# Mutable in-memory working copies mutated by the PUT endpoints below.
# NOTE(review): these are process-local and not persisted — they reset on
# restart and are not shared across workers; confirm that's intentional.
system_settings = default_settings["system"].copy()
notification_settings = default_settings["notifications"].copy()
security_settings = default_settings["security"].copy()
|
||||
|
||||
|
||||
class SystemSettingsUpdate(BaseModel):
    """Full replacement payload for the system settings group.

    Defaults mirror ``default_settings["system"]``; the PUT handler
    replaces the whole group with ``model_dump()`` of this payload.
    """

    system_name: str = "智能星球"
    refresh_interval: int = 60
    auto_refresh: bool = True
    data_retention_days: int = 30
    max_concurrent_tasks: int = 5
|
||||
|
||||
|
||||
class NotificationSettingsUpdate(BaseModel):
    """Full replacement payload for the notification settings group.

    ``email_address`` is validated as an e-mail when provided; defaults
    mirror ``default_settings["notifications"]``.
    """

    email_enabled: bool = False
    email_address: Optional[EmailStr] = None
    critical_alerts: bool = True
    warning_alerts: bool = True
    daily_summary: bool = False
|
||||
|
||||
|
||||
class SecuritySettingsUpdate(BaseModel):
    """Full replacement payload for the security settings group.

    Defaults mirror ``default_settings["security"]``.
    """

    session_timeout: int = 60
    max_login_attempts: int = 5
    password_policy: str = "medium"
|
||||
|
||||
|
||||
@router.get("/system")
async def get_system_settings(current_user: User = Depends(get_current_user)):
    """Return the current in-memory system settings group."""
    return dict(system=system_settings)
|
||||
|
||||
|
||||
@router.put("/system")
async def update_system_settings(
    settings: SystemSettingsUpdate,
    current_user: User = Depends(get_current_user),
):
    """Replace the in-memory system settings wholesale."""
    global system_settings
    new_values = settings.model_dump()
    system_settings = new_values
    return {"status": "updated", "system": new_values}
|
||||
|
||||
|
||||
@router.get("/notifications")
async def get_notification_settings(current_user: User = Depends(get_current_user)):
    """Return the current in-memory notification settings group."""
    return dict(notifications=notification_settings)
|
||||
|
||||
|
||||
@router.put("/notifications")
async def update_notification_settings(
    settings: NotificationSettingsUpdate,
    current_user: User = Depends(get_current_user),
):
    """Replace the in-memory notification settings wholesale."""
    global notification_settings
    new_values = settings.model_dump()
    notification_settings = new_values
    return {"status": "updated", "notifications": new_values}
|
||||
|
||||
|
||||
@router.get("/security")
async def get_security_settings(current_user: User = Depends(get_current_user)):
    """Return the current in-memory security settings group."""
    return dict(security=security_settings)
|
||||
|
||||
|
||||
@router.put("/security")
async def update_security_settings(
    settings: SecuritySettingsUpdate,
    current_user: User = Depends(get_current_user),
):
    """Replace the in-memory security settings wholesale."""
    global security_settings
    new_values = settings.model_dump()
    security_settings = new_values
    return {"status": "updated", "security": new_values}
|
||||
|
||||
|
||||
@router.get("")
async def get_all_settings(current_user: User = Depends(get_current_user)):
    """Return every settings group in a single payload."""
    combined = {
        "system": system_settings,
        "notifications": notification_settings,
        "security": security_settings,
    }
    return combined
|
||||
157
backend/app/api/v1/tasks.py
Normal file
157
backend/app/api/v1/tasks.py
Normal file
@@ -0,0 +1,157 @@
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import text
|
||||
|
||||
from app.db.session import get_db
|
||||
from app.models.user import User
|
||||
from app.core.security import get_current_user
|
||||
from app.services.collectors.registry import collector_registry
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get("")
async def list_tasks(
    datasource_id: int = None,
    status: str = None,
    page: int = 1,
    page_size: int = 20,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Paginated listing of collection tasks joined with their source name.

    Fixes vs. the previous version:
    * when ``datasource_id`` was given, the count query appended a second
      ``WHERE`` clause after ``WHERE 1=1`` ("... WHERE 1=1 WHERE
      ct.datasource_id = ..."), which is invalid SQL — it now appends
      ``AND`` like the data query;
    * LIMIT/OFFSET are bound parameters instead of f-string interpolation.
    """
    offset = (page - 1) * page_size
    query = """
        SELECT ct.id, ct.datasource_id, ds.name as datasource_name, ct.status,
               ct.started_at, ct.completed_at, ct.records_processed, ct.error_message
        FROM collection_tasks ct
        JOIN data_sources ds ON ct.datasource_id = ds.id
        WHERE 1=1
    """
    count_query = "SELECT COUNT(*) FROM collection_tasks ct WHERE 1=1"
    params = {}

    if datasource_id:
        query += " AND ct.datasource_id = :datasource_id"
        count_query += " AND ct.datasource_id = :datasource_id"  # was a second WHERE
        params["datasource_id"] = datasource_id
    if status:
        query += " AND ct.status = :status"
        count_query += " AND ct.status = :status"
        params["status"] = status

    query += " ORDER BY ct.created_at DESC LIMIT :limit OFFSET :offset"

    result = await db.execute(text(query), {**params, "limit": page_size, "offset": offset})
    tasks = result.fetchall()

    count_result = await db.execute(text(count_query), params)
    total = count_result.scalar()

    return {
        "total": total or 0,
        "page": page,
        "page_size": page_size,
        "data": [
            {
                "id": t[0],
                "datasource_id": t[1],
                "datasource_name": t[2],
                "status": t[3],
                "started_at": t[4].isoformat() if t[4] else None,
                "completed_at": t[5].isoformat() if t[5] else None,
                "records_processed": t[6],
                "error_message": t[7],
            }
            for t in tasks
        ],
    }
|
||||
|
||||
|
||||
@router.get("/{task_id}")
async def get_task(
    task_id: int,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Fetch one collection task joined with its data source name."""
    lookup = await db.execute(
        text("""
        SELECT ct.id, ct.datasource_id, ds.name as datasource_name, ct.status,
               ct.started_at, ct.completed_at, ct.records_processed, ct.error_message
        FROM collection_tasks ct
        JOIN data_sources ds ON ct.datasource_id = ds.id
        WHERE ct.id = :id
        """),
        {"id": task_id},
    )
    row = lookup.fetchone()

    if row is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Task not found",
        )

    started, completed = row[4], row[5]
    return {
        "id": row[0],
        "datasource_id": row[1],
        "datasource_name": row[2],
        "status": row[3],
        "started_at": started.isoformat() if started else None,
        "completed_at": completed.isoformat() if completed else None,
        "records_processed": row[6],
        "error_message": row[7],
    }
|
||||
|
||||
|
||||
@router.post("/datasources/{source_id}/trigger")
async def trigger_collection(
    source_id: int,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Run a collector synchronously and log the run into collection_tasks.

    Fix: the INSERT into ``collection_tasks`` was never committed, so the
    audit row could be rolled back when the session closed; every other
    write endpoint in this API commits explicitly.
    """
    lookup = await db.execute(
        text("SELECT id, name, collector_class FROM data_sources WHERE id = :id"),
        {"id": source_id},
    )
    datasource = lookup.fetchone()
    if not datasource:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Data source not found",
        )

    # Map e.g. "Top500Collector" -> "top500" registry key.
    collector_class_name = datasource[2]
    collector_name = collector_class_name.lower().replace("collector", "")

    collector = collector_registry.get(collector_name)
    if not collector:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Collector {collector_name} not found",
        )

    result = await collector.run(db)

    await db.execute(
        text("""
        INSERT INTO collection_tasks (datasource_id, status, records_processed, error_message, started_at, completed_at, created_at)
        VALUES (:datasource_id, :status, :records_processed, :error_message, :started_at, :completed_at, NOW())
        """),
        {
            "datasource_id": source_id,
            "status": result.get("status", "unknown"),
            "records_processed": result.get("records_processed", 0),
            "error_message": result.get("error"),
            "started_at": datetime.utcnow(),
            "completed_at": datetime.utcnow(),
        },
    )
    await db.commit()  # BUG FIX: persist the audit row (commit was missing)

    return {
        "message": "Collection task executed",
        "result": result,
    }
|
||||
263
backend/app/api/v1/users.py
Normal file
263
backend/app/api/v1/users.py
Normal file
@@ -0,0 +1,263 @@
|
||||
from typing import List
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import text
|
||||
|
||||
from app.core.security import get_current_user, get_password_hash
|
||||
from app.db.session import get_db
|
||||
from app.models.user import User
|
||||
from app.schemas.user import UserCreate, UserResponse, UserUpdate
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
def check_permission(current_user: User, required_roles: List[str]) -> bool:
    """Return True when the user's role is one of *required_roles*.

    Works with both enum-valued roles (compared via ``.value``) and
    plain string roles.
    """
    role = current_user.role
    role_value = getattr(role, "value", role)
    return role_value in required_roles
|
||||
|
||||
|
||||
@router.get("", response_model=dict)
async def list_users(
    page: int = 1,
    page_size: int = 20,
    role: str = None,
    is_active: bool = None,
    search: str = None,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Paginated, filterable user listing (admin/super_admin only).

    Fix: LIMIT/OFFSET were interpolated into the SQL with an f-string;
    they are now bound parameters like every other value.
    """
    if not check_permission(current_user, ["super_admin", "admin"]):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Insufficient permissions",
        )

    # Build WHERE clause; all values are bound, never interpolated.
    where_clauses = []
    params = {}
    if role:
        where_clauses.append("role = :role")
        params["role"] = role
    if is_active is not None:
        where_clauses.append("is_active = :is_active")
        params["is_active"] = is_active
    if search:
        where_clauses.append("(username ILIKE :search OR email ILIKE :search)")
        params["search"] = f"%{search}%"

    where_sql = " AND ".join(where_clauses) if where_clauses else "1=1"

    query = text(
        f"SELECT id, username, email, role, is_active, last_login_at, created_at FROM users WHERE {where_sql} ORDER BY created_at DESC LIMIT :limit OFFSET :offset"
    )
    count_query = text(f"SELECT COUNT(*) FROM users WHERE {where_sql}")

    result = await db.execute(
        query, {**params, "limit": page_size, "offset": (page - 1) * page_size}
    )
    users = result.fetchall()

    count_result = await db.execute(count_query, params)
    total = count_result.scalar()

    return {
        "total": total,
        "page": page,
        "page_size": page_size,
        "data": [
            {
                "id": u[0],
                "username": u[1],
                "email": u[2],
                "role": u[3],
                "is_active": u[4],
                "last_login_at": u[5],
                "created_at": u[6],
            }
            for u in users
        ],
    }
|
||||
|
||||
|
||||
@router.get("/{user_id}", response_model=dict)
async def get_user(
    user_id: int,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Fetch one user. Admins may read anyone; others only themselves."""
    if not check_permission(current_user, ["super_admin", "admin"]) and current_user.id != user_id:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Insufficient permissions",
        )

    lookup = await db.execute(
        text(
            "SELECT id, username, email, role, is_active, last_login_at, created_at FROM users WHERE id = :id"
        ),
        {"id": user_id},
    )
    row = lookup.fetchone()
    if row is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="User not found",
        )

    return {
        "id": row[0],
        "username": row[1],
        "email": row[2],
        "role": row[3],
        "is_active": row[4],
        "last_login_at": row[5],
        "created_at": row[6],
    }
|
||||
|
||||
|
||||
@router.post("", response_model=dict, status_code=status.HTTP_201_CREATED)
async def create_user(
    user_data: UserCreate,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Create a new user account (super_admin only).

    Flow: permission check -> uniqueness check on username/email ->
    INSERT with a hashed password -> commit -> re-select by username to
    obtain the new id.

    NOTE(review): the uniqueness check and the INSERT are not atomic — a
    concurrent request could insert the same username between the two
    statements; a DB unique constraint should back this up. The id is
    also re-fetched by username rather than via RETURNING/lastrowid.
    """
    if not check_permission(current_user, ["super_admin"]):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Only super_admin can create users",
        )

    # Reject duplicates before inserting (best-effort; see NOTE above).
    result = await db.execute(
        text("SELECT id FROM users WHERE username = :username OR email = :email"),
        {"username": user_data.username, "email": user_data.email},
    )
    if result.fetchone():
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Username or email already exists",
        )

    # Only the hash is stored; the plaintext password never touches the DB.
    hashed_password = get_password_hash(user_data.password)

    await db.execute(
        text("""INSERT INTO users (username, email, password_hash, role, is_active, created_at, updated_at)
                VALUES (:username, :email, :password_hash, :role, :is_active, NOW(), NOW())"""),
        {
            "username": user_data.username,
            "email": user_data.email,
            "password_hash": hashed_password,
            "role": user_data.role,
            "is_active": True,
        },
    )
    await db.commit()

    # Get the inserted user ID
    result = await db.execute(
        text("SELECT id FROM users WHERE username = :username"),
        {"username": user_data.username},
    )
    new_user = result.fetchone()

    if new_user is None:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to create user",
        )

    return {
        "id": new_user[0],
        "username": user_data.username,
        "email": user_data.email,
        "role": user_data.role,
        "is_active": True,
    }
|
||||
|
||||
|
||||
@router.put("/{user_id}")
async def update_user(
    user_id: int,
    user_data: UserUpdate,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Partially update a user.

    Admins may edit anyone; regular users only themselves. Changing the
    role is reserved for super_admin.
    """
    if not check_permission(current_user, ["super_admin", "admin"]) and current_user.id != user_id:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Insufficient permissions",
        )

    if user_data.role is not None and not check_permission(current_user, ["super_admin"]):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Only super_admin can change user role",
        )

    exists = await db.execute(
        text("SELECT id FROM users WHERE id = :id"),
        {"id": user_id},
    )
    if exists.fetchone() is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="User not found",
        )

    # Assemble the SET clause from only the provided fields; values stay bound.
    assignments = []
    params = {"id": user_id}
    for column in ("email", "role", "is_active"):
        value = getattr(user_data, column)
        if value is not None:
            assignments.append(f"{column} = :{column}")
            params[column] = value

    if assignments:
        assignments.append("updated_at = NOW()")
        stmt = text(f"UPDATE users SET {', '.join(assignments)} WHERE id = :id")
        await db.execute(stmt, params)
        await db.commit()

    return {"message": "User updated successfully"}
|
||||
|
||||
|
||||
@router.delete("/{user_id}")
async def delete_user(
    user_id: int,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Hard-delete a user (super_admin only; self-deletion forbidden)."""
    if not check_permission(current_user, ["super_admin"]):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Only super_admin can delete users",
        )

    if current_user.id == user_id:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Cannot delete yourself",
        )

    found = await db.execute(
        text("SELECT id FROM users WHERE id = :id"),
        {"id": user_id},
    )
    if found.fetchone() is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="User not found",
        )

    await db.execute(
        text("DELETE FROM users WHERE id = :id"),
        {"id": user_id},
    )
    await db.commit()

    return {"message": "User deleted successfully"}
|
||||
99
backend/app/api/v1/websocket.py
Normal file
99
backend/app/api/v1/websocket.py
Normal file
@@ -0,0 +1,99 @@
|
||||
"""WebSocket API endpoints"""
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, WebSocket, WebSocketDisconnect, Query
|
||||
from jose import jwt, JWTError
|
||||
|
||||
from app.core.config import settings
|
||||
from app.core.websocket.manager import manager
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
async def authenticate_token(token: str) -> Optional[dict]:
    """Validate a JWT presented by a WebSocket client.

    Args:
        token: Raw JWT string taken from the connection query parameter.

    Returns:
        The decoded payload when the token is a valid access token that
        carries a subject (``sub``) claim; ``None`` otherwise. Never raises.
    """
    try:
        payload = jwt.decode(token, settings.SECRET_KEY, algorithms=[settings.ALGORITHM])
    except JWTError as e:
        # Lazy %-style args: no string formatting unless the record is emitted.
        logger.warning("WebSocket auth failed: %s", e)
        return None

    if payload.get("type") != "access":
        # Was an f-string with no placeholder; plain literal is correct here.
        logger.warning("WebSocket auth failed: wrong token type")
        return None

    if payload.get("sub") is None:
        # Without this check the endpoint would register the client under the
        # literal user id "None" (it does str(payload.get("sub"))).
        logger.warning("WebSocket auth failed: token missing 'sub' claim")
        return None

    return payload
|
||||
|
||||
|
||||
@router.websocket("/ws")
async def websocket_endpoint(
    websocket: WebSocket,
    token: str = Query(...),
):
    """WebSocket endpoint for real-time data.

    Authenticates via the ``token`` query parameter, registers the
    connection with the shared manager, then serves a small message
    protocol (heartbeat / subscribe / control_frame) until the client
    disconnects. Closes with application code 4001 on auth failure.
    """
    # Security fix: the previous version logged the first 20 characters of
    # the token. Even a prefix of a bearer credential must not reach logs.
    logger.info("WebSocket connection attempt")
    payload = await authenticate_token(token)
    if payload is None:
        logger.warning("WebSocket authentication failed, closing connection")
        await websocket.close(code=4001)  # 4001: application-defined auth failure
        return

    user_id = str(payload.get("sub"))
    await manager.connect(websocket, user_id)

    try:
        # Greeting frame: advertises capabilities and the heartbeat cadence
        # the server expects (must match the 30 s receive timeout below).
        await websocket.send_json(
            {
                "type": "connection_established",
                "data": {
                    "connection_id": f"conn_{user_id}",
                    "server_version": settings.VERSION,
                    "heartbeat_interval": 30,
                    "supported_channels": [
                        "gpu_clusters",
                        "submarine_cables",
                        "ixp_nodes",
                        "alerts",
                        "dashboard",
                    ],
                },
            }
        )

        while True:
            try:
                data = await asyncio.wait_for(websocket.receive_json(), timeout=30)
            except asyncio.TimeoutError:
                # Quiet client: ping it rather than dropping the connection.
                await websocket.send_json({"type": "heartbeat", "data": {"action": "ping"}})
                continue

            msg_type = data.get("type")
            if msg_type == "heartbeat":
                await websocket.send_json(
                    {
                        "type": "heartbeat",
                        "data": {"action": "pong", "timestamp": datetime.utcnow().isoformat()},
                    }
                )
            elif msg_type == "subscribe":
                channels = data.get("data", {}).get("channels", [])
                # Subscription is acknowledged only; channel routing is
                # presumably handled by the manager — TODO confirm.
                await websocket.send_json(
                    {
                        "type": "subscription_confirmed",
                        "data": {"action": "subscribe", "channels": channels},
                    }
                )
            elif msg_type == "control_frame":
                await websocket.send_json(
                    {"type": "control_acknowledged", "data": {"received": True}}
                )
            else:
                # Unknown frame types are acknowledged, not rejected.
                await websocket.send_json({"type": "ack", "data": {"received": True}})
    except WebSocketDisconnect:
        pass
    finally:
        # Always unregister, whether we left via disconnect or an error.
        manager.disconnect(websocket, user_id)
|
||||
Reference in New Issue
Block a user