first commit
This commit is contained in:
BIN
backend/app/core/__pycache__/cache.cpython-311.pyc
Normal file
BIN
backend/app/core/__pycache__/cache.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/app/core/__pycache__/config.cpython-311.pyc
Normal file
BIN
backend/app/core/__pycache__/config.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/app/core/__pycache__/security.cpython-311.pyc
Normal file
BIN
backend/app/core/__pycache__/security.cpython-311.pyc
Normal file
Binary file not shown.
128
backend/app/core/cache.py
Normal file
128
backend/app/core/cache.py
Normal file
@@ -0,0 +1,128 @@
|
||||
"""Redis caching service"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
from datetime import timedelta
|
||||
from typing import Optional, Any
|
||||
|
||||
import redis
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Lazily constructed, process-wide Redis client holder.
class _RedisClient:
    _client = None

    @classmethod
    def get_client(cls):
        """Return the shared Redis client, creating it on first call."""
        if cls._client is not None:
            return cls._client
        url = settings.REDIS_URL
        if url.startswith("redis://"):
            # Full URL configured — let redis-py parse it.
            cls._client = redis.from_url(url, decode_responses=True)
        else:
            # Fall back to the individual host/port/db settings.
            cls._client = redis.Redis(
                host=settings.REDIS_SERVER,
                port=settings.REDIS_PORT,
                db=settings.REDIS_DB,
                decode_responses=True,
            )
        return cls._client
|
||||
|
||||
|
||||
class CacheService:
    """Redis caching service with JSON serialization.

    Values are stored as JSON strings; anything json cannot serialize is
    coerced with ``str`` (see ``set``). All operations are best-effort:
    Redis errors are logged and a neutral value returned, so a cache
    outage never breaks the caller.
    """

    def __init__(self):
        # Shared lazily-initialized client (see _RedisClient above).
        self.client = _RedisClient.get_client()

    def get(self, key: str) -> Optional[Any]:
        """Return the deserialized value for *key*, or None on miss/error.

        NOTE: a cached JSON ``null`` is indistinguishable from a miss.
        """
        try:
            value = self.client.get(key)
            # Compare to None explicitly so falsy payloads are not
            # mistaken for cache misses.
            if value is not None:
                return json.loads(value)
            return None
        except Exception as e:
            logger.warning(f"Cache get error: {e}")
            return None

    def set(
        self,
        key: str,
        value: Any,
        expire_seconds: int = 300,
    ) -> bool:
        """Set value in cache with expiration; return True on success.

        ``default=str`` means non-JSON types are silently stringified —
        callers must be able to consume the stringified form on read.
        """
        try:
            serialized = json.dumps(value, default=str)
            return self.client.setex(key, expire_seconds, serialized)
        except Exception as e:
            logger.warning(f"Cache set error: {e}")
            return False

    def delete(self, key: str) -> bool:
        """Delete *key*; return True if a key was actually removed."""
        try:
            return self.client.delete(key) > 0
        except Exception as e:
            logger.warning(f"Cache delete error: {e}")
            return False

    def delete_pattern(self, pattern: str) -> int:
        """Delete all keys matching *pattern*; return the number deleted.

        Uses SCAN (scan_iter) instead of KEYS so large keyspaces do not
        block the Redis server while matching.
        """
        try:
            keys = list(self.client.scan_iter(match=pattern))
            if keys:
                return self.client.delete(*keys)
            return 0
        except Exception as e:
            logger.warning(f"Cache delete_pattern error: {e}")
            return 0

    def get_or_set(
        self,
        key: str,
        fallback: callable,
        expire_seconds: int = 300,
    ) -> Optional[Any]:
        """Return the cached value, computing and caching it via *fallback* on miss.

        A fallback result of None is returned but never cached.
        """
        value = self.get(key)
        if value is not None:
            return value

        value = fallback()
        if value is not None:
            self.set(key, value, expire_seconds)
        return value

    def invalidate_pattern(self, pattern: str) -> int:
        """Invalidate all keys matching *pattern* (alias for delete_pattern)."""
        return self.delete_pattern(pattern)
|
||||
|
||||
|
||||
cache = CacheService()
|
||||
|
||||
|
||||
def cached(expire_seconds: int = 300, key_prefix: str = ""):
    """Decorator caching an async function's result in Redis.

    Args:
        expire_seconds: TTL for the cached entry.
        key_prefix: Prefix namespacing the cache key.

    NOTE(review): the key embeds the ``repr`` of args/kwargs, so arguments
    need stable value-based reprs (ints, strings, tuples); objects with
    default reprs produce a fresh key per instance — confirm call sites.
    Only usable on ``async`` functions (the wrapper awaits the target).
    """
    from functools import wraps

    def decorator(func):
        @wraps(func)  # preserve __name__/__doc__ for introspection and routing
        async def wrapper(*args, **kwargs):
            cache_key = f"{key_prefix}:{func.__name__}:{args}:{kwargs}"
            cache_key = cache_key.replace(":", "_").replace(" ", "")

            cached_value = cache.get(cache_key)
            if cached_value is not None:
                return cached_value

            result = await func(*args, **kwargs)
            cache.set(cache_key, result, expire_seconds)
            return result

        return wrapper

    return decorator
|
||||
46
backend/app/core/config.py
Normal file
46
backend/app/core/config.py
Normal file
@@ -0,0 +1,46 @@
|
||||
from functools import lru_cache
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
import os
|
||||
|
||||
from pydantic_settings import BaseSettings
|
||||
|
||||
|
||||
class Settings(BaseSettings):
    """Application configuration, loaded from the environment / .env file."""

    PROJECT_NAME: str = "Intelligent Planet Plan"
    VERSION: str = "1.0.0"
    API_V1_STR: str = "/api/v1"
    # SECURITY: placeholder only — must be overridden via the environment
    # in any real deployment.
    SECRET_KEY: str = "your-secret-key-change-in-production"
    ALGORITHM: str = "HS256"
    ACCESS_TOKEN_EXPIRE_MINUTES: int = 15
    REFRESH_TOKEN_EXPIRE_DAYS: int = 7

    POSTGRES_SERVER: str = "localhost"
    POSTGRES_USER: str = "postgres"
    POSTGRES_PASSWORD: str = "postgres"
    POSTGRES_DB: str = "planet_db"
    # NOTE(review): this default hardcodes host "postgres" and ignores the
    # POSTGRES_* fields above — confirm whether it should be derived from
    # them instead of set independently. (The stray f-prefix on the
    # placeholder-free literal has been removed; the value is unchanged.)
    DATABASE_URL: str = "postgresql+asyncpg://postgres:postgres@postgres:5432/planet_db"

    REDIS_SERVER: str = "localhost"
    REDIS_PORT: int = 6379
    REDIS_DB: int = 0

    CORS_ORIGINS: List[str] = ["http://localhost:3000", "http://localhost:8000"]

    @property
    def REDIS_URL(self) -> str:
        """Redis URL; the REDIS_URL env var overrides the derived default."""
        return os.getenv(
            "REDIS_URL", f"redis://{self.REDIS_SERVER}:{self.REDIS_PORT}/{self.REDIS_DB}"
        )

    class Config:
        # pydantic-settings: read overrides from .env, keys are case-sensitive.
        env_file = ".env"
        case_sensitive = True
|
||||
|
||||
|
||||
@lru_cache()
def get_settings() -> Settings:
    """Return a cached Settings instance (environment is read only once)."""
    return Settings()


# Module-level singleton used throughout the application.
settings = get_settings()
|
||||
162
backend/app/core/security.py
Normal file
162
backend/app/core/security.py
Normal file
@@ -0,0 +1,162 @@
|
||||
from datetime import datetime, timedelta, timezone
from typing import Optional

import bcrypt
import redis
from fastapi import Depends, HTTPException, status
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
from jose import JWTError, jwt
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession

from app.core.config import settings
from app.db.session import get_db
from app.models.user import User
|
||||
|
||||
oauth2_scheme = HTTPBearer()
|
||||
|
||||
|
||||
class _RedisClient:
    """Process-wide holder for a single shared Redis connection."""

    _client = None

    @classmethod
    def get_client(cls):
        """Create the client on first use; return the cached instance after."""
        if cls._client is not None:
            return cls._client
        url = settings.REDIS_URL
        if url.startswith("redis://"):
            cls._client = redis.from_url(url, decode_responses=True)
        else:
            cls._client = redis.Redis(
                host=settings.REDIS_SERVER,
                port=settings.REDIS_PORT,
                db=settings.REDIS_DB,
                decode_responses=True,
            )
        return cls._client


# NOTE(review): resolved eagerly at import time, so the client is built on
# module load rather than first use — confirm this is intended.
redis_client = _RedisClient.get_client()
|
||||
|
||||
|
||||
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Check *plain_password* against a stored bcrypt *hashed_password*."""
    candidate = plain_password.encode()
    stored = hashed_password.encode()
    return bcrypt.checkpw(candidate, stored)
|
||||
|
||||
|
||||
def get_password_hash(password: str) -> str:
    """Hash *password* with bcrypt using a freshly generated salt."""
    salt = bcrypt.gensalt()
    digest = bcrypt.hashpw(password.encode(), salt)
    return digest.decode()
|
||||
|
||||
|
||||
def create_access_token(data: dict, expires_delta: Optional[timedelta] = None) -> str:
    """Create a signed JWT access token.

    Args:
        data: Claims to embed; ``sub`` is coerced to str per RFC 7519.
        expires_delta: Custom lifetime; defaults to
            settings.ACCESS_TOKEN_EXPIRE_MINUTES.
    """
    to_encode = data.copy()
    # Timezone-aware UTC replaces the deprecated naive datetime.utcnow().
    now = datetime.now(timezone.utc)
    if expires_delta:
        expire = now + expires_delta
    else:
        expire = now + timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
    to_encode.update({"exp": expire, "type": "access"})
    if "sub" in to_encode:
        to_encode["sub"] = str(to_encode["sub"])
    return jwt.encode(to_encode, settings.SECRET_KEY, algorithm=settings.ALGORITHM)
|
||||
|
||||
|
||||
def create_refresh_token(data: dict) -> str:
    """Create a signed JWT refresh token valid for REFRESH_TOKEN_EXPIRE_DAYS.

    ``sub`` is coerced to str per RFC 7519 (StringOrURI).
    """
    to_encode = data.copy()
    # Timezone-aware UTC replaces the deprecated naive datetime.utcnow().
    expire = datetime.now(timezone.utc) + timedelta(days=settings.REFRESH_TOKEN_EXPIRE_DAYS)
    to_encode.update({"exp": expire, "type": "refresh"})
    if "sub" in to_encode:
        to_encode["sub"] = str(to_encode["sub"])
    return jwt.encode(to_encode, settings.SECRET_KEY, algorithm=settings.ALGORITHM)
|
||||
|
||||
|
||||
def decode_token(token: str) -> Optional[dict]:
    """Decode and verify a JWT; return the payload, or None if invalid/expired."""
    try:
        return jwt.decode(token, settings.SECRET_KEY, algorithms=[settings.ALGORITHM])
    except JWTError:
        return None
|
||||
|
||||
|
||||
async def get_current_user(
    credentials: HTTPAuthorizationCredentials = Depends(oauth2_scheme),
    db: AsyncSession = Depends(get_db),
) -> User:
    """Resolve the authenticated user from a Bearer access token.

    Raises HTTP 401 if the token is revoked, invalid, of the wrong type,
    malformed, or the user is missing/inactive.
    """
    token = credentials.credentials
    # Revocation is checked first so blacklisted tokens are rejected even
    # while still cryptographically valid.
    if redis_client.sismember("blacklisted_tokens", token):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Token has been revoked",
        )
    payload = decode_token(token)
    if payload is None or payload.get("type") != "access":
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid token",
        )
    user_id = payload.get("sub")
    if user_id is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid token",
        )
    # A non-numeric "sub" claim is a malformed token, not a server error:
    # without this guard int() raises ValueError and the client sees a 500.
    try:
        user_pk = int(user_id)
    except (TypeError, ValueError):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid token",
        )
    result = await db.execute(
        text(
            "SELECT id, username, email, password_hash, role, is_active FROM users WHERE id = :id"
        ),
        {"id": user_pk},
    )
    row = result.fetchone()
    if row is None or not row[5]:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="User not found or inactive",
        )
    # NOTE: positional mapping — keep in sync with the SELECT column order.
    user = User()
    user.id = row[0]
    user.username = row[1]
    user.email = row[2]
    user.password_hash = row[3]
    user.role = row[4]
    user.is_active = row[5]
    return user
|
||||
|
||||
|
||||
async def get_current_user_refresh(
    credentials: HTTPAuthorizationCredentials = Depends(oauth2_scheme),
    db: AsyncSession = Depends(get_db),
) -> User:
    """Resolve the authenticated user from a Bearer refresh token.

    Raises HTTP 401 if the token is invalid, not a refresh token,
    malformed, or the user is missing/inactive.
    """
    token = credentials.credentials
    payload = decode_token(token)
    if payload is None or payload.get("type") != "refresh":
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid refresh token",
        )
    user_id = payload.get("sub")
    if user_id is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid token",
        )
    # A non-numeric "sub" claim is a malformed token, not a server error:
    # without this guard int() raises ValueError and the client sees a 500.
    try:
        user_pk = int(user_id)
    except (TypeError, ValueError):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid token",
        )
    result = await db.execute(
        text(
            "SELECT id, username, email, password_hash, role, is_active FROM users WHERE id = :id"
        ),
        {"id": user_pk},
    )
    row = result.fetchone()
    if row is None or not row[5]:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="User not found or inactive",
        )
    # NOTE: positional mapping — keep in sync with the SELECT column order.
    user = User()
    user.id = row[0]
    user.username = row[1]
    user.email = row[2]
    user.password_hash = row[3]
    user.role = row[4]
    user.is_active = row[5]
    return user
|
||||
|
||||
|
||||
def blacklist_token(token: str) -> None:
    """Add *token* to the Redis revocation set checked by get_current_user.

    NOTE(review): the set is never pruned, so entries outlive the JWT's own
    expiry and grow without bound — consider per-token keys with TTLs.
    """
    redis_client.sadd("blacklisted_tokens", token)
|
||||
4
backend/app/core/websocket/__init__.py
Normal file
4
backend/app/core/websocket/__init__.py
Normal file
@@ -0,0 +1,4 @@
|
||||
"""__init__.py for websocket package"""
|
||||
|
||||
from app.core.websocket.manager import manager, ConnectionManager
|
||||
from app.core.websocket.broadcaster import broadcaster, DataBroadcaster
|
||||
BIN
backend/app/core/websocket/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
backend/app/core/websocket/__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
Binary file not shown.
BIN
backend/app/core/websocket/__pycache__/manager.cpython-311.pyc
Normal file
BIN
backend/app/core/websocket/__pycache__/manager.cpython-311.pyc
Normal file
Binary file not shown.
93
backend/app/core/websocket/broadcaster.py
Normal file
93
backend/app/core/websocket/broadcaster.py
Normal file
@@ -0,0 +1,93 @@
|
||||
"""Data broadcaster for WebSocket connections"""
|
||||
|
||||
import asyncio
|
||||
from datetime import datetime
|
||||
from typing import Dict, Any, Optional
|
||||
|
||||
from app.core.websocket.manager import manager
|
||||
|
||||
|
||||
class DataBroadcaster:
    """Periodically broadcasts data to connected WebSocket clients."""

    def __init__(self):
        # True while the periodic broadcast loops should keep running.
        self.running = False
        # Named background tasks so stop() can cancel them.
        self.tasks: Dict[str, asyncio.Task] = {}

    async def get_dashboard_stats(self) -> Dict[str, Any]:
        """Get dashboard statistics.

        NOTE(review): all values except the timestamp are hardcoded
        placeholders — wire up to real metric sources.
        """
        return {
            "total_datasources": 9,
            "active_datasources": 8,
            "tasks_today": 45,
            "success_rate": 97.8,
            "last_updated": datetime.utcnow().isoformat(),
            "alerts": {"critical": 0, "warning": 2, "info": 5},
        }

    async def broadcast_stats(self, interval: int = 5):
        """Broadcast dashboard stats every *interval* seconds while running."""
        import logging  # local: this module configures no logger at top level

        log = logging.getLogger(__name__)
        while self.running:
            try:
                stats = await self.get_dashboard_stats()
                await manager.broadcast(
                    {
                        "type": "data_frame",
                        "channel": "dashboard",
                        "timestamp": datetime.utcnow().isoformat(),
                        "payload": {"stats": stats},
                    },
                    channel="dashboard",
                )
            except Exception:
                # Best-effort: a failed broadcast must not kill the loop,
                # but it should be visible in the logs (was silently dropped).
                log.exception("dashboard stats broadcast failed")
            await asyncio.sleep(interval)

    async def broadcast_alert(self, alert: Dict[str, Any]):
        """Broadcast an alert notification to all connected clients."""
        await manager.broadcast(
            {
                "type": "alert_notification",
                "timestamp": datetime.utcnow().isoformat(),
                "data": {"alert": alert},
            }
        )

    async def broadcast_gpu_update(self, data: Dict[str, Any]):
        """Broadcast a GPU cluster update frame to all connected clients."""
        await manager.broadcast(
            {
                "type": "data_frame",
                "channel": "gpu_clusters",
                "timestamp": datetime.utcnow().isoformat(),
                "payload": data,
            }
        )

    async def broadcast_custom(self, channel: str, data: Dict[str, Any]):
        """Broadcast custom data to *channel*, falling back to everyone
        when the channel has no registered connections."""
        await manager.broadcast(
            {
                "type": "data_frame",
                "channel": channel,
                "timestamp": datetime.utcnow().isoformat(),
                "payload": data,
            },
            channel=channel if channel in manager.active_connections else "all",
        )

    def start(self):
        """Start the periodic broadcasters (idempotent)."""
        if not self.running:
            self.running = True
            self.tasks["dashboard"] = asyncio.create_task(self.broadcast_stats(5))

    def stop(self):
        """Stop all broadcasters.

        Cancellation is requested but not awaited; loops also exit on their
        own once ``running`` is False.
        """
        self.running = False
        for task in self.tasks.values():
            task.cancel()
        self.tasks.clear()
|
||||
|
||||
|
||||
broadcaster = DataBroadcaster()
|
||||
70
backend/app/core/websocket/manager.py
Normal file
70
backend/app/core/websocket/manager.py
Normal file
@@ -0,0 +1,70 @@
|
||||
"""WebSocket Connection Manager"""
|
||||
|
||||
import json
|
||||
import asyncio
|
||||
from typing import Dict, Set, Optional
|
||||
from datetime import datetime
|
||||
from fastapi import WebSocket
|
||||
import redis.asyncio as redis
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
|
||||
class ConnectionManager:
    """Manages WebSocket connections, grouped by user id."""

    def __init__(self):
        self.active_connections: Dict[str, Set[WebSocket]] = {}  # user_id -> connections
        self.redis_client: Optional[redis.Redis] = None

    async def connect(self, websocket: WebSocket, user_id: str):
        """Accept *websocket* and register it under *user_id*."""
        await websocket.accept()
        if user_id not in self.active_connections:
            self.active_connections[user_id] = set()
        self.active_connections[user_id].add(websocket)

        # Build the shared async Redis client lazily on first connection.
        # NOTE(review): the client is stored but not used anywhere else in
        # this class — confirm another component consumes it.
        if self.redis_client is None:
            redis_url = settings.REDIS_URL
            if redis_url.startswith("redis://"):
                self.redis_client = redis.from_url(redis_url, decode_responses=True)
            else:
                self.redis_client = redis.Redis(
                    host=settings.REDIS_SERVER,
                    port=settings.REDIS_PORT,
                    db=settings.REDIS_DB,
                    decode_responses=True,
                )

    def disconnect(self, websocket: WebSocket, user_id: str):
        """Unregister *websocket*; drop the user entry when it empties."""
        if user_id in self.active_connections:
            self.active_connections[user_id].discard(websocket)
            if not self.active_connections[user_id]:
                del self.active_connections[user_id]

    async def send_personal_message(self, message: dict, user_id: str):
        """Send *message* to every connection of *user_id*, ignoring failures."""
        if user_id in self.active_connections:
            # Iterate a snapshot: a disconnect() fired while we await would
            # otherwise mutate the set mid-iteration and raise RuntimeError.
            for connection in list(self.active_connections[user_id]):
                try:
                    await connection.send_json(message)
                except Exception:
                    # Best-effort fan-out; dead sockets are removed by
                    # disconnect() when their handler exits.
                    pass

    async def broadcast(self, message: dict, channel: str = "all"):
        """Send *message* to all users, or to the single user id *channel*."""
        if channel == "all":
            # Snapshot the keys: handlers may (dis)connect during awaits.
            for user_id in list(self.active_connections):
                await self.send_personal_message(message, user_id)
        else:
            await self.send_personal_message(message, channel)

    async def close_all(self):
        """Close every connection and clear the registry."""
        for user_id in list(self.active_connections):
            for connection in list(self.active_connections[user_id]):
                try:
                    await connection.close()
                except Exception:
                    # One failing close must not strand the remaining sockets.
                    pass
        self.active_connections.clear()
|
||||
|
||||
|
||||
# Module-level singleton shared by all WebSocket routes.
manager = ConnectionManager()


async def get_websocket_manager() -> ConnectionManager:
    """FastAPI dependency returning the shared ConnectionManager."""
    return manager
|
||||
Reference in New Issue
Block a user