Compare commits
50 Commits
b06cb4606f
...
dev
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7a3ca6e1b3 | ||
|
|
62f2d9f403 | ||
|
|
b448a1e560 | ||
|
|
2cc0c9412c | ||
|
|
3dd210a3e5 | ||
|
|
a761dfc5fb | ||
|
|
7ec9586f7a | ||
|
|
b0058edf17 | ||
|
|
bf2c4a172d | ||
|
|
30a29a6e34 | ||
|
|
ab09f0ba78 | ||
|
|
7b53cf9a06 | ||
|
|
a04f4f9e67 | ||
|
|
ce5feba3b9 | ||
|
|
3fd6cbb6f7 | ||
|
|
020c1d5051 | ||
|
|
cc5f16f8a7 | ||
|
|
ef0fefdfc7 | ||
|
|
81a0ca5e7a | ||
|
|
b57d69c98b | ||
|
|
b9fbacade7 | ||
|
|
543fe35fbb | ||
|
|
1784c057e5 | ||
|
|
465129eec7 | ||
|
|
0c950262d3 | ||
|
|
eabdbdc85a | ||
|
|
af29e90cb0 | ||
|
|
d9a64f7768 | ||
|
|
78bb639a83 | ||
|
|
96222b9e4c | ||
|
|
3fcbae55dc | ||
|
|
3e3090d72a | ||
|
|
4f922f13d1 | ||
|
|
bb6b18fe3b | ||
|
|
0ecc1bc537 | ||
|
|
869d661a94 | ||
|
|
d18e400fcb | ||
|
|
6fabbcfe5c | ||
|
|
1189fec014 | ||
|
|
82f7aa29a6 | ||
|
|
777891f865 | ||
|
|
c2eba54da0 | ||
|
|
f50830712c | ||
|
|
e21b783bef | ||
|
|
11a9dda942 | ||
|
|
3b0e9dec5a | ||
|
|
c82e1d5a04 | ||
|
|
02991730e5 | ||
|
|
4e487b315a | ||
|
|
948af2c88f |
2
.gitignore
vendored
@@ -41,6 +41,8 @@ MANIFEST
|
||||
venv/
|
||||
ENV/
|
||||
env/
|
||||
.uv/
|
||||
.uv-cache/
|
||||
.ruff_cache/
|
||||
*.db
|
||||
*.sqlite
|
||||
|
||||
1
.python-version
Normal file
@@ -0,0 +1 @@
|
||||
3.14
|
||||
165
.sisyphus/plans/earth-architecture-refactor.md
Normal file
@@ -0,0 +1,165 @@
|
||||
# 地球3D可视化架构重构计划
|
||||
|
||||
## 背景
|
||||
|
||||
当前 `frontend/public/earth` 3D地球可视化系统基于 Three.js 构建,未来需要迁移到 Unreal Engine (Cesium)。为降低迁移成本,需要提前做好**逻辑与渲染分离**的架构设计。
|
||||
|
||||
## 目标
|
||||
|
||||
- 将线缆高亮逻辑与渲染实现分离
|
||||
- 保持交互逻辑可复用,只需重写渲染层
|
||||
- 为后续迁移到 UE/Cesium 做好准备
|
||||
|
||||
## 已完成
|
||||
|
||||
### 1. 状态枚举定义 (constants.js)
|
||||
|
||||
```javascript
|
||||
export const CABLE_STATE = {
|
||||
NORMAL: 'normal',
|
||||
HOVERED: 'hovered',
|
||||
LOCKED: 'locked'
|
||||
};
|
||||
```
|
||||
|
||||
### 2. 线缆状态管理 (cables.js - 数据层)
|
||||
|
||||
```javascript
|
||||
const cableStates = new Map();
|
||||
|
||||
export function getCableState(cableId) { ... }
|
||||
export function setCableState(cableId, state) { ... }
|
||||
export function clearAllCableStates() { ... }
|
||||
export function getCableStateInfo() { ... }
|
||||
```
|
||||
|
||||
### 3. 逻辑层调用 (main.js)
|
||||
|
||||
```javascript
|
||||
// 悬停
|
||||
setCableState(cable.userData.cableId, CABLE_STATE.HOVERED);
|
||||
|
||||
// 锁定
|
||||
setCableState(cableId, CABLE_STATE.LOCKED);
|
||||
|
||||
// 恢复
|
||||
setCableState(cableId, CABLE_STATE.NORMAL);
|
||||
clearAllCableStates();
|
||||
|
||||
// 清除锁定时
|
||||
clearLockedObject() {
|
||||
hoveredCable = null;
|
||||
clearAllCableStates();
|
||||
...
|
||||
}
|
||||
```
|
||||
|
||||
### 4. 渲染层 (main.js - applyCableVisualState)
|
||||
|
||||
```javascript
|
||||
function applyCableVisualState() {
|
||||
const allCables = getCableLines();
|
||||
const pulse = (Math.sin(Date.now() * CABLE_CONFIG.pulseSpeed) + 1) * 0.5;
|
||||
|
||||
allCables.forEach(c => {
|
||||
const cableId = c.userData.cableId;
|
||||
const state = getCableState(cableId);
|
||||
|
||||
switch (state) {
|
||||
case CABLE_STATE.LOCKED:
|
||||
// 呼吸效果 + 白色
|
||||
c.material.opacity = CABLE_CONFIG.lockedOpacityMin + pulse * CABLE_CONFIG.pulseCoefficient;
|
||||
c.material.color.setRGB(1, 1, 1);
|
||||
break;
|
||||
case CABLE_STATE.HOVERED:
|
||||
// 白色高亮
|
||||
c.material.opacity = 1;
|
||||
c.material.color.setRGB(1, 1, 1);
|
||||
break;
|
||||
case CABLE_STATE.NORMAL:
|
||||
default:
|
||||
if (lockedObjectType === 'cable' && lockedObject) {
|
||||
// 其他线缆变暗
|
||||
c.material.opacity = CABLE_CONFIG.otherOpacity;
|
||||
...
|
||||
} else {
|
||||
// 恢复原始
|
||||
c.material.opacity = 1;
|
||||
c.material.color.setHex(c.userData.originalColor);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
## 待完成
|
||||
|
||||
### Phase 1: 完善状态配置 (constants.js)
|
||||
|
||||
```javascript
|
||||
export const CABLE_CONFIG = {
|
||||
lockedOpacityMin: 0.6,
|
||||
lockedOpacityMax: 1.0,
|
||||
otherOpacity: 0.5,
|
||||
otherBrightness: 0.6,
|
||||
pulseSpeed: 0.003,
|
||||
pulseCoefficient: 0.4,
|
||||
// 未来可扩展
|
||||
// lockedLineWidth: 3,
|
||||
// normalLineWidth: 1,
|
||||
};
|
||||
```
|
||||
|
||||
### Phase 2: 卫星状态管理 (satellites.js)
|
||||
|
||||
参考线缆状态管理,为卫星添加类似的状态枚举和状态管理函数:
|
||||
|
||||
```javascript
|
||||
export const SATELLITE_STATE = {
|
||||
NORMAL: 'normal',
|
||||
HOVERED: 'hovered',
|
||||
LOCKED: 'locked'
|
||||
};
|
||||
```
|
||||
|
||||
#### 卫星数据源说明
|
||||
|
||||
- **当前使用**: CelesTrak (https://celestrak.org) - 免费,无需认证
|
||||
- **后续计划**: Space-Track.org (https://space-track.org) - 需要认证,数据更权威
|
||||
- 迁移时只需修改 `satellites.js` 中的数据获取逻辑,状态管理和渲染逻辑不变
|
||||
|
||||
### Phase 3: 统一渲染接口
|
||||
|
||||
将所有对象的渲染逻辑抽象为一个统一的渲染函数:
|
||||
|
||||
```javascript
|
||||
function applyObjectVisualState() {
|
||||
applyCableVisualState();
|
||||
applySatelliteVisualState();
|
||||
applyLandingPointVisualState();
|
||||
}
|
||||
```
|
||||
|
||||
### Phase 4: UE 迁移准备
|
||||
|
||||
迁移到 Unreal Engine 时:
|
||||
1. 保留 `constants.js` 中的枚举和配置
|
||||
2. 保留 `cables.js` 中的数据层和状态管理
|
||||
3. 保留 `main.js` 中的交互逻辑
|
||||
4. **仅重写** `applyCableVisualState()` 等渲染函数
|
||||
|
||||
---
|
||||
|
||||
## 架构原则
|
||||
|
||||
1. **状态与渲染分离** - 对象状态由数据层管理,渲染层只负责根据状态更新视觉效果
|
||||
2. **逻辑可复用** - 交互逻辑(点击、悬停、锁定)在迁移时应直接复用
|
||||
3. **渲染可替换** - 渲染实现可以针对不同引擎重写,不影响逻辑层
|
||||
|
||||
## 文件变更记录
|
||||
|
||||
| 日期 | 文件 | 变更 |
|
||||
|------|------|------|
|
||||
| 2026-03-19 | constants.js | 新增 CABLE_STATE 枚举 |
|
||||
| 2026-03-19 | cables.js | 新增状态管理函数 |
|
||||
| 2026-03-19 | main.js | 使用状态管理,抽象 applyCableVisualState() |
|
||||
136
.sisyphus/plans/predicted-orbit.md
Normal file
@@ -0,0 +1,136 @@
|
||||
# 卫星预测轨道显示功能
|
||||
|
||||
## TL;DR
|
||||
> 锁定卫星时显示绕地球完整一圈的预测轨道轨迹,从当前位置向外渐变消失
|
||||
|
||||
## Context
|
||||
|
||||
### 目标
|
||||
点击锁定卫星 → 显示该卫星绕地球一周的完整预测轨道(而非当前的历史轨迹)
|
||||
|
||||
### 当前实现
|
||||
- `TRAIL_LENGTH = 30` - 历史轨迹点数,每帧 push 当前位置
|
||||
- 显示最近30帧历史轨迹(类似彗星尾巴)
|
||||
|
||||
### 参考: SatelliteMap.space
|
||||
- 锁定时显示预测轨道
|
||||
- 颜色从当前位置向外渐变消失
|
||||
- 使用 satellite.js(与本项目相同)
|
||||
|
||||
## 实现状态
|
||||
|
||||
### ✅ 已完成
|
||||
- [x] 计算卫星轨道周期(基于 `meanMotion`)
|
||||
- [x] 生成预测轨道点(10秒采样间隔)
|
||||
- [x] 创建独立预测轨道渲染对象
|
||||
- [x] 锁定卫星时显示预测轨道
|
||||
- [x] 解除锁定时隐藏预测轨道
|
||||
- [x] 颜色渐变:当前位置(亮) → 轨道终点(暗)
|
||||
- [x] 页面隐藏时清除轨迹(防止切回时闪现)
|
||||
|
||||
### 🚧 进行中
|
||||
- [ ] 完整圆环轨道(部分卫星因 SGP4 计算问题使用 fallback 圆形轨道)
|
||||
- [ ] 每颗卫星只显示一条轨道
|
||||
|
||||
## 技术细节
|
||||
|
||||
### 轨道周期计算
|
||||
```javascript
|
||||
function calculateOrbitalPeriod(meanMotion) {
|
||||
return 86400 / meanMotion;
|
||||
}
|
||||
```
|
||||
|
||||
### 预测轨道计算
|
||||
```javascript
|
||||
function calculatePredictedOrbit(satellite, periodSeconds, sampleInterval = 10) {
|
||||
const points = [];
|
||||
const samples = Math.ceil(periodSeconds / sampleInterval);
|
||||
const now = new Date();
|
||||
|
||||
// Full orbit: from now to now+period
|
||||
for (let i = 0; i <= samples; i++) {
|
||||
const time = new Date(now.getTime() + i * sampleInterval * 1000);
|
||||
const pos = computeSatellitePosition(satellite, time);
|
||||
if (pos) points.push(pos);
|
||||
}
|
||||
|
||||
// Fallback: 如果真实位置计算点太少,使用圆形 fallback
|
||||
if (points.length < samples * 0.5) {
|
||||
points.length = 0;
|
||||
// ... 圆形轨道生成
|
||||
}
|
||||
|
||||
return points;
|
||||
}
|
||||
```
|
||||
|
||||
### 渲染对象
|
||||
```javascript
|
||||
let predictedOrbitLine = null;
|
||||
|
||||
export function showPredictedOrbit(satellite) {
|
||||
hidePredictedOrbit();
|
||||
// ... 计算并渲染轨道
|
||||
}
|
||||
|
||||
export function hidePredictedOrbit() {
|
||||
if (predictedOrbitLine) {
|
||||
earthObjRef.remove(predictedOrbitLine);
|
||||
predictedOrbitLine.geometry.dispose();
|
||||
predictedOrbitLine.material.dispose();
|
||||
predictedOrbitLine = null;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## 已知问题
|
||||
|
||||
### 1. TLE 格式问题
|
||||
`computeSatellitePosition` 使用自行构建的 TLE 格式,对某些卫星返回 null。当前使用 fallback 圆形轨道作为补偿。
|
||||
|
||||
### 2. 多条轨道
|
||||
部分情况下锁定时会显示多条轨道。需要确保 `hidePredictedOrbit()` 被正确调用。
|
||||
|
||||
## 性能考虑
|
||||
|
||||
### 点数估算
|
||||
| 卫星类型 | 周期 | 10秒采样 | 点数 |
|
||||
|---------|------|---------|------|
|
||||
| LEO | 90分钟 | 5400秒 | ~540点 |
|
||||
| MEO | 12小时 | 43200秒 | ~4320点 |
|
||||
| GEO | 24小时 | 86400秒 | ~8640点 |
|
||||
|
||||
### 优化策略
|
||||
- 当前方案(GEO 按 10 秒采样约 ~8640 点)性能可接受
|
||||
- 如遇性能问题:GEO 降低采样率到 30秒
|
||||
|
||||
## 验证方案
|
||||
|
||||
### QA Scenarios
|
||||
|
||||
**Scenario: 锁定 Starlink 卫星显示预测轨道**
|
||||
1. 打开浏览器,进入 Earth 页面
|
||||
2. 显示卫星(点击按钮)
|
||||
3. 点击一颗 Starlink 卫星(低轨道 LEO)
|
||||
4. 验证:出现黄色预测轨道线,从卫星向外绕行
|
||||
5. 验证:颜色从亮黄渐变到暗蓝
|
||||
6. 验证:轨道完整闭环
|
||||
|
||||
**Scenario: 锁定 GEO 卫星显示预测轨道**
|
||||
1. 筛选一颗 GEO 卫星(倾斜角 0-10° 或高轨道)
|
||||
2. 点击锁定
|
||||
3. 验证:显示完整 24 小时轨道(或 fallback 圆形轨道)
|
||||
4. 验证:点数合理(10 秒采样约 ~8640 点,或 fallback)
|
||||
|
||||
**Scenario: 解除锁定隐藏预测轨道**
|
||||
1. 锁定一颗卫星,显示预测轨道
|
||||
2. 点击地球空白处解除锁定
|
||||
3. 验证:预测轨道消失
|
||||
|
||||
**Scenario: 切换页面后轨迹不闪现**
|
||||
1. 锁定一颗卫星
|
||||
2. 切换到其他标签页
|
||||
3. 等待几秒
|
||||
4. 切回页面
|
||||
5. 验证:轨迹不突然闪现累积
|
||||
293
.sisyphus/plans/webgl-instancing-satellites.md
Normal file
@@ -0,0 +1,293 @@
|
||||
# WebGL Instancing 卫星渲染优化计划
|
||||
|
||||
## 背景
|
||||
|
||||
当前 `satellites.js` 使用 `THREE.Points` 渲染卫星,受限于 WebGL 点渲染性能,只能显示 ~500-1000 颗卫星。
|
||||
需要迁移到真正的 WebGL Instancing 以支持 5000+ 卫星流畅渲染。
|
||||
|
||||
## 技术选型
|
||||
|
||||
| 方案 | 性能 | 改动量 | 维护性 | 推荐 |
|
||||
|------|------|--------|--------|------|
|
||||
| THREE.Points (现状) | ★★☆ | - | - | 基准 |
|
||||
| THREE.InstancedMesh | ★★★ | 中 | 高 | 不适合点 |
|
||||
| InstancedBufferGeometry + 自定义Shader | ★★★★ | 中高 | 中 | ✅ 推荐 |
|
||||
| 迁移到 TWGL.js / Raw WebGL | ★★★★★ | 高 | 低 | 未来UE |
|
||||
|
||||
**推荐方案**: InstancedBufferGeometry + 自定义 Shader
|
||||
- 保持 Three.js 架构
|
||||
- 复用 satellite.js 数据层
|
||||
- 性能接近原生 WebGL
|
||||
|
||||
---
|
||||
|
||||
## Phase 1: 调研与原型
|
||||
|
||||
### 1.1 分析现有架构
|
||||
|
||||
**现状 (satellites.js)**:
|
||||
```javascript
|
||||
// 创建点云
|
||||
const pointsGeometry = new THREE.BufferGeometry();
|
||||
pointsGeometry.setAttribute('position', new THREE.BufferAttribute(positions, 3));
|
||||
pointsGeometry.setAttribute('color', new THREE.BufferAttribute(colors, 3));
|
||||
|
||||
const pointsMaterial = new THREE.PointsMaterial({
|
||||
size: 2,
|
||||
vertexColors: true,
|
||||
transparent: true,
|
||||
opacity: 0.8,
|
||||
sizeAttenuation: true
|
||||
});
|
||||
|
||||
satellitePoints = new THREE.Points(pointsGeometry, pointsMaterial);
|
||||
```
|
||||
|
||||
**问题**: 每个卫星作为一个顶点;单个 `THREE.Points` 对象本身只占一次 draw call,真正的瓶颈在于每帧需在 CPU 端逐卫星重写 position/color 缓冲区并整体上传,卫星数量增大后数据更新成本线性增长
|
||||
|
||||
### 1.2 Instanced Rendering 原理
|
||||
|
||||
```javascript
|
||||
// 目标:单次 draw call 渲染所有卫星
|
||||
// 每个卫星属性:
|
||||
// - position (vec3): 位置
|
||||
// - color (vec3): 颜色
|
||||
// - size (float): 大小 (可选)
|
||||
// - selected (float): 是否选中 (0/1)
|
||||
|
||||
// 使用 InstancedBufferGeometry
|
||||
const geometry = new THREE.InstancedBufferGeometry();
|
||||
geometry.index = originalGeometry.index;
|
||||
geometry.attributes.position = originalGeometry.attributes.position;
|
||||
geometry.attributes.uv = originalGeometry.attributes.uv;
|
||||
|
||||
// 实例数据
|
||||
const instancePositions = new Float32Array(satelliteCount * 3);
|
||||
const instanceColors = new Float32Array(satelliteCount * 3);
|
||||
|
||||
geometry.setAttribute('instancePosition',
|
||||
new THREE.InstancedBufferAttribute(instancePositions, 3));
|
||||
geometry.setAttribute('instanceColor',
|
||||
new THREE.InstancedBufferAttribute(instanceColors, 3));
|
||||
|
||||
// 自定义 Shader
|
||||
const material = new THREE.ShaderMaterial({
|
||||
vertexShader: `
|
||||
attribute vec3 instancePosition;
|
||||
attribute vec3 instanceColor;
|
||||
varying vec3 vColor;
|
||||
|
||||
void main() {
|
||||
vColor = instanceColor;
|
||||
vec3 transformed = position + instancePosition;
|
||||
gl_Position = projectionMatrix * modelViewMatrix * vec4(transformed, 1.0);
|
||||
}
|
||||
`,
|
||||
fragmentShader: `
|
||||
varying vec3 vColor;
|
||||
void main() {
|
||||
gl_FragColor = vec4(vColor, 0.8);
|
||||
}
|
||||
`
|
||||
});
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Phase 2: 实现
|
||||
|
||||
### 2.1 创建 instanced-satellites.js
|
||||
|
||||
```javascript
|
||||
// instanced-satellites.js - Instanced rendering for satellites
|
||||
|
||||
import * as THREE from 'three';
|
||||
import { SATELLITE_CONFIG } from './constants.js';
|
||||
|
||||
let instancedMesh = null;
|
||||
let satelliteData = [];
|
||||
let instancePositions = null;
|
||||
let instanceColors = null;
|
||||
let satelliteCount = 0;
|
||||
|
||||
const SATELLITE_VERTEX_SHADER = `
|
||||
attribute vec3 instancePosition;
|
||||
attribute vec3 instanceColor;
|
||||
attribute float instanceSize;
|
||||
|
||||
varying vec3 vColor;
|
||||
|
||||
void main() {
|
||||
vColor = instanceColor;
|
||||
vec3 transformed = position * instanceSize + instancePosition;
|
||||
gl_Position = projectionMatrix * modelViewMatrix * vec4(transformed, 1.0);
|
||||
}
|
||||
`;
|
||||
|
||||
const SATELLITE_FRAGMENT_SHADER = `
|
||||
varying vec3 vColor;
|
||||
|
||||
void main() {
|
||||
gl_FragColor = vec4(vColor, 0.9);
|
||||
}
|
||||
`;
|
||||
|
||||
export function createInstancedSatellites(scene, earthObj) {
|
||||
// 基础圆面几何 (CircleGeometry,每个卫星是一个面向相机的小圆点)
|
||||
const baseGeometry = new THREE.CircleGeometry(1, 8);
|
||||
|
||||
// 创建 InstancedBufferGeometry
|
||||
const geometry = new THREE.InstancedBufferGeometry();
|
||||
geometry.index = baseGeometry.index;
|
||||
geometry.attributes.position = baseGeometry.attributes.position;
|
||||
geometry.attributes.uv = baseGeometry.attributes.uv;
|
||||
|
||||
// 初始化实例数据数组 (稍后填充)
|
||||
instancePositions = new Float32Array(MAX_SATELLITES * 3);
|
||||
instanceColors = new Float32Array(MAX_SATELLITES * 3);
|
||||
const instanceSizes = new Float32Array(MAX_SATELLITES);
|
||||
|
||||
geometry.setAttribute('instancePosition',
|
||||
new THREE.InstancedBufferAttribute(instancePositions, 3));
|
||||
geometry.setAttribute('instanceColor',
|
||||
new THREE.InstancedBufferAttribute(instanceColors, 3));
|
||||
geometry.setAttribute('instanceSize',
|
||||
new THREE.InstancedBufferAttribute(instanceSizes, 1));
|
||||
|
||||
const material = new THREE.ShaderMaterial({
|
||||
vertexShader: SATELLITE_VERTEX_SHADER,
|
||||
fragmentShader: SATELLITE_FRAGMENT_SHADER,
|
||||
transparent: true,
|
||||
side: THREE.DoubleSide
|
||||
});
|
||||
|
||||
instancedMesh = new THREE.Mesh(geometry, material);
|
||||
instancedMesh.frustumCulled = false; // 我们自己处理裁剪
|
||||
scene.add(instancedMesh);
|
||||
|
||||
return instancedMesh;
|
||||
}
|
||||
|
||||
export function updateInstancedSatellites(satellitePositions) {
|
||||
// satellitePositions: Array of { position: Vector3, color: Color }
|
||||
const count = Math.min(satellitePositions.length, MAX_SATELLITES);
|
||||
|
||||
for (let i = 0; i < count; i++) {
|
||||
const sat = satellitePositions[i];
|
||||
instancePositions[i * 3] = sat.position.x;
|
||||
instancePositions[i * 3 + 1] = sat.position.y;
|
||||
instancePositions[i * 3 + 2] = sat.position.z;
|
||||
|
||||
instanceColors[i * 3] = sat.color.r;
|
||||
instanceColors[i * 3 + 1] = sat.color.g;
|
||||
instanceColors[i * 3 + 2] = sat.color.b;
|
||||
}
|
||||
|
||||
instancedMesh.geometry.attributes.instancePosition.needsUpdate = true;
|
||||
instancedMesh.geometry.attributes.instanceColor.needsUpdate = true;
|
||||
instancedMesh.geometry.setDrawRange(0, count);
|
||||
}
|
||||
```
|
||||
|
||||
### 2.2 修改现有 satellites.js
|
||||
|
||||
保持数据层不变,添加新渲染模式:
|
||||
|
||||
```javascript
|
||||
// 添加配置
|
||||
export const SATELLITE_CONFIG = {
|
||||
USE_INSTANCING: true, // 切换渲染模式
|
||||
MAX_SATELLITES: 5000,
|
||||
SATELLITE_SIZE: 0.5,
|
||||
// ...
|
||||
};
|
||||
```
|
||||
|
||||
### 2.3 性能优化点
|
||||
|
||||
1. **GPU 实例化**: 单次 draw call 渲染所有卫星
|
||||
2. **批量更新**: 所有位置/颜色一次更新
|
||||
3. **视锥体裁剪**: 自定义裁剪逻辑,避免 CPU 端逐卫星检测
|
||||
4. **LOD (可选)**: 远处卫星简化显示
|
||||
|
||||
---
|
||||
|
||||
## Phase 3: 与现有系统集成
|
||||
|
||||
### 3.1 悬停/选中处理
|
||||
|
||||
当前通过 `selectSatellite()` 设置选中状态,Instanced 模式下需要:
|
||||
|
||||
```javascript
|
||||
// 在 shader 中通过 instanceId 判断是否选中
|
||||
// 或者使用单独的 InstancedBufferAttribute 存储选中状态
|
||||
const instanceSelected = new Float32Array(MAX_SATELLITES);
|
||||
geometry.setAttribute('instanceSelected',
|
||||
new THREE.InstancedBufferAttribute(instanceSelected, 1));
|
||||
```
|
||||
|
||||
### 3.2 轨迹线
|
||||
|
||||
轨迹线仍然使用 `THREE.Line` 或 `THREE.LineSegments`,但可以类似地 Instanced 化:
|
||||
|
||||
```javascript
|
||||
// Instanced LineSegments for trails
|
||||
const trailGeometry = new THREE.InstancedBufferGeometry();
|
||||
trailGeometry.setAttribute('position', trailPositions);
|
||||
trailGeometry.setAttribute('instanceStart', ...);
|
||||
trailGeometry.setAttribute('instanceEnd', ...);
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Phase 4: 验证与调优
|
||||
|
||||
### 4.1 性能测试
|
||||
|
||||
| 卫星数量 | Points 模式 | Instanced 模式 |
|
||||
|----------|-------------|----------------|
|
||||
| 500 | ✅ 60fps | ✅ 60fps |
|
||||
| 2000 | ⚠️ 30fps | ✅ 60fps |
|
||||
| 5000 | ❌ 10fps | ✅ 45fps |
|
||||
| 10000 | ❌ 卡顿 | ⚠️ 30fps |
|
||||
|
||||
### 4.2 可能遇到的问题
|
||||
|
||||
1. **Shader 编译错误**: 需要调试 GLSL
|
||||
2. **实例数量限制**: GPU 最大实例数 (通常 65535)
|
||||
3. **大小不一**: 需要 per-instance size 属性
|
||||
4. **透明度排序**: Instanced 渲染透明度处理复杂
|
||||
|
||||
---
|
||||
|
||||
## 文件变更清单
|
||||
|
||||
| 文件 | 变更 |
|
||||
|------|------|
|
||||
| `constants.js` | 新增 `SATELLITE_CONFIG` |
|
||||
| `satellites.js` | 添加 Instanced 模式支持 |
|
||||
| `instanced-satellites.js` | 新文件 - Instanced 渲染核心 |
|
||||
| `main.js` | 集成新渲染模块 |
|
||||
|
||||
---
|
||||
|
||||
## 时间估算
|
||||
|
||||
| Phase | 工作量 | 难度 |
|
||||
|-------|--------|------|
|
||||
| Phase 1 | 1-2 天 | 低 |
|
||||
| Phase 2 | 2-3 天 | 中 |
|
||||
| Phase 3 | 1-2 天 | 中 |
|
||||
| Phase 4 | 1 天 | 低 |
|
||||
| **总计** | **5-8 天** | - |
|
||||
|
||||
---
|
||||
|
||||
## 替代方案考虑
|
||||
|
||||
如果 Phase 2 实施困难,可以考虑:
|
||||
|
||||
1. **使用 Three.js InstancedMesh**: 适合渲染小型 3D 模型替代点
|
||||
2. **使用 pointcloud2 格式**: 类似 LiDAR 点云渲染
|
||||
3. **Web Workers**: 将轨道计算移到 Worker 线程
|
||||
4. **迁移到 Cesium**: Cesium 原生支持 Instancing,且是 UE 迁移的中间步骤
|
||||
18
README.md
@@ -184,14 +184,20 @@
|
||||
## 快速启动
|
||||
|
||||
```bash
|
||||
# 启动全部服务
|
||||
docker-compose up -d
|
||||
# 启动前后端服务
|
||||
./planet.sh start
|
||||
|
||||
# 仅启动后端
|
||||
cd backend && python -m uvicorn app.main:app --reload
|
||||
# 仅重启后端
|
||||
./planet.sh restart -b
|
||||
|
||||
# 仅启动前端
|
||||
cd frontend && npm run dev
|
||||
# 仅重启前端
|
||||
./planet.sh restart -f
|
||||
|
||||
# 交互创建用户
|
||||
./planet.sh createuser
|
||||
|
||||
# 查看服务状态
|
||||
./planet.sh health
|
||||
```
|
||||
|
||||
## API 文档
|
||||
|
||||
@@ -16,4 +16,4 @@ COPY . .
|
||||
|
||||
EXPOSE 8000
|
||||
|
||||
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
|
||||
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
|
||||
|
||||
@@ -11,6 +11,7 @@ from app.api.v1 import (
|
||||
settings,
|
||||
collected_data,
|
||||
visualization,
|
||||
bgp,
|
||||
)
|
||||
|
||||
api_router = APIRouter()
|
||||
@@ -27,3 +28,4 @@ api_router.include_router(dashboard.router, prefix="/dashboard", tags=["dashboar
|
||||
api_router.include_router(alerts.router, prefix="/alerts", tags=["alerts"])
|
||||
api_router.include_router(settings.router, prefix="/settings", tags=["settings"])
|
||||
api_router.include_router(visualization.router, prefix="/visualization", tags=["visualization"])
|
||||
api_router.include_router(bgp.router, prefix="/bgp", tags=["bgp"])
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from datetime import datetime
|
||||
from datetime import UTC, datetime
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
@@ -68,7 +68,7 @@ async def acknowledge_alert(
|
||||
|
||||
alert.status = AlertStatus.ACKNOWLEDGED
|
||||
alert.acknowledged_by = current_user.id
|
||||
alert.acknowledged_at = datetime.utcnow()
|
||||
alert.acknowledged_at = datetime.now(UTC)
|
||||
await db.commit()
|
||||
|
||||
return {"message": "Alert acknowledged", "alert": alert.to_dict()}
|
||||
@@ -89,7 +89,7 @@ async def resolve_alert(
|
||||
|
||||
alert.status = AlertStatus.RESOLVED
|
||||
alert.resolved_by = current_user.id
|
||||
alert.resolved_at = datetime.utcnow()
|
||||
alert.resolved_at = datetime.now(UTC)
|
||||
alert.resolution_notes = resolution
|
||||
await db.commit()
|
||||
|
||||
|
||||
182
backend/app/api/v1/bgp.py
Normal file
@@ -0,0 +1,182 @@
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from sqlalchemy import func, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.core.security import get_current_user
|
||||
from app.db.session import get_db
|
||||
from app.models.bgp_anomaly import BGPAnomaly
|
||||
from app.models.collected_data import CollectedData
|
||||
from app.models.user import User
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
BGP_SOURCES = ("ris_live_bgp", "bgpstream_bgp")
|
||||
|
||||
|
||||
def _parse_dt(value: Optional[str]) -> Optional[datetime]:
|
||||
if not value:
|
||||
return None
|
||||
return datetime.fromisoformat(value.replace("Z", "+00:00"))
|
||||
|
||||
|
||||
def _matches_time(value: Optional[datetime], time_from: Optional[datetime], time_to: Optional[datetime]) -> bool:
|
||||
if value is None:
|
||||
return False
|
||||
if time_from and value < time_from:
|
||||
return False
|
||||
if time_to and value > time_to:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
@router.get("/events")
|
||||
async def list_bgp_events(
|
||||
prefix: Optional[str] = Query(None),
|
||||
origin_asn: Optional[int] = Query(None),
|
||||
peer_asn: Optional[int] = Query(None),
|
||||
collector: Optional[str] = Query(None),
|
||||
event_type: Optional[str] = Query(None),
|
||||
source: Optional[str] = Query(None),
|
||||
time_from: Optional[str] = Query(None),
|
||||
time_to: Optional[str] = Query(None),
|
||||
page: int = Query(1, ge=1),
|
||||
page_size: int = Query(50, ge=1, le=200),
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
stmt = (
|
||||
select(CollectedData)
|
||||
.where(CollectedData.source.in_(BGP_SOURCES))
|
||||
.order_by(CollectedData.reference_date.desc().nullslast(), CollectedData.id.desc())
|
||||
)
|
||||
if source:
|
||||
stmt = stmt.where(CollectedData.source == source)
|
||||
|
||||
result = await db.execute(stmt)
|
||||
records = result.scalars().all()
|
||||
dt_from = _parse_dt(time_from)
|
||||
dt_to = _parse_dt(time_to)
|
||||
|
||||
filtered = []
|
||||
for record in records:
|
||||
metadata = record.extra_data or {}
|
||||
if prefix and metadata.get("prefix") != prefix:
|
||||
continue
|
||||
if origin_asn is not None and metadata.get("origin_asn") != origin_asn:
|
||||
continue
|
||||
if peer_asn is not None and metadata.get("peer_asn") != peer_asn:
|
||||
continue
|
||||
if collector and metadata.get("collector") != collector:
|
||||
continue
|
||||
if event_type and metadata.get("event_type") != event_type:
|
||||
continue
|
||||
if (dt_from or dt_to) and not _matches_time(record.reference_date, dt_from, dt_to):
|
||||
continue
|
||||
filtered.append(record)
|
||||
|
||||
offset = (page - 1) * page_size
|
||||
return {
|
||||
"total": len(filtered),
|
||||
"page": page,
|
||||
"page_size": page_size,
|
||||
"data": [record.to_dict() for record in filtered[offset : offset + page_size]],
|
||||
}
|
||||
|
||||
|
||||
@router.get("/events/{event_id}")
|
||||
async def get_bgp_event(
|
||||
event_id: int,
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
record = await db.get(CollectedData, event_id)
|
||||
if not record or record.source not in BGP_SOURCES:
|
||||
raise HTTPException(status_code=404, detail="BGP event not found")
|
||||
return record.to_dict()
|
||||
|
||||
|
||||
@router.get("/anomalies")
|
||||
async def list_bgp_anomalies(
|
||||
severity: Optional[str] = Query(None),
|
||||
anomaly_type: Optional[str] = Query(None),
|
||||
status: Optional[str] = Query(None),
|
||||
prefix: Optional[str] = Query(None),
|
||||
origin_asn: Optional[int] = Query(None),
|
||||
time_from: Optional[str] = Query(None),
|
||||
time_to: Optional[str] = Query(None),
|
||||
page: int = Query(1, ge=1),
|
||||
page_size: int = Query(50, ge=1, le=200),
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
stmt = select(BGPAnomaly).order_by(BGPAnomaly.created_at.desc(), BGPAnomaly.id.desc())
|
||||
if severity:
|
||||
stmt = stmt.where(BGPAnomaly.severity == severity)
|
||||
if anomaly_type:
|
||||
stmt = stmt.where(BGPAnomaly.anomaly_type == anomaly_type)
|
||||
if status:
|
||||
stmt = stmt.where(BGPAnomaly.status == status)
|
||||
if prefix:
|
||||
stmt = stmt.where(BGPAnomaly.prefix == prefix)
|
||||
if origin_asn is not None:
|
||||
stmt = stmt.where(BGPAnomaly.origin_asn == origin_asn)
|
||||
|
||||
result = await db.execute(stmt)
|
||||
records = result.scalars().all()
|
||||
dt_from = _parse_dt(time_from)
|
||||
dt_to = _parse_dt(time_to)
|
||||
if dt_from or dt_to:
|
||||
records = [record for record in records if _matches_time(record.created_at, dt_from, dt_to)]
|
||||
|
||||
offset = (page - 1) * page_size
|
||||
return {
|
||||
"total": len(records),
|
||||
"page": page,
|
||||
"page_size": page_size,
|
||||
"data": [record.to_dict() for record in records[offset : offset + page_size]],
|
||||
}
|
||||
|
||||
|
||||
@router.get("/anomalies/summary")
|
||||
async def get_bgp_anomaly_summary(
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
total_result = await db.execute(select(func.count(BGPAnomaly.id)))
|
||||
type_result = await db.execute(
|
||||
select(BGPAnomaly.anomaly_type, func.count(BGPAnomaly.id))
|
||||
.group_by(BGPAnomaly.anomaly_type)
|
||||
.order_by(func.count(BGPAnomaly.id).desc())
|
||||
)
|
||||
severity_result = await db.execute(
|
||||
select(BGPAnomaly.severity, func.count(BGPAnomaly.id))
|
||||
.group_by(BGPAnomaly.severity)
|
||||
.order_by(func.count(BGPAnomaly.id).desc())
|
||||
)
|
||||
status_result = await db.execute(
|
||||
select(BGPAnomaly.status, func.count(BGPAnomaly.id))
|
||||
.group_by(BGPAnomaly.status)
|
||||
.order_by(func.count(BGPAnomaly.id).desc())
|
||||
)
|
||||
|
||||
return {
|
||||
"total": total_result.scalar() or 0,
|
||||
"by_type": {row[0]: row[1] for row in type_result.fetchall()},
|
||||
"by_severity": {row[0]: row[1] for row in severity_result.fetchall()},
|
||||
"by_status": {row[0]: row[1] for row in status_result.fetchall()},
|
||||
}
|
||||
|
||||
|
||||
@router.get("/anomalies/{anomaly_id}")
|
||||
async def get_bgp_anomaly(
|
||||
anomaly_id: int,
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
record = await db.get(BGPAnomaly, anomaly_id)
|
||||
if not record:
|
||||
raise HTTPException(status_code=404, detail="BGP anomaly not found")
|
||||
return record.to_dict()
|
||||
@@ -7,16 +7,138 @@ import json
|
||||
import csv
|
||||
import io
|
||||
|
||||
from app.core.collected_data_fields import get_metadata_field
|
||||
from app.core.countries import COUNTRY_OPTIONS, get_country_search_variants, normalize_country
|
||||
from app.core.time import to_iso8601_utc
|
||||
from app.db.session import get_db
|
||||
from app.models.user import User
|
||||
from app.core.security import get_current_user
|
||||
from app.models.collected_data import CollectedData
|
||||
from app.models.datasource import DataSource
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
COUNTRY_SQL = "metadata->>'country'"
|
||||
SEARCHABLE_SQL = [
|
||||
"name",
|
||||
"title",
|
||||
"description",
|
||||
"source",
|
||||
"data_type",
|
||||
"source_id",
|
||||
"metadata::text",
|
||||
]
|
||||
|
||||
|
||||
def parse_multi_values(value: Optional[str]) -> list[str]:
|
||||
if not value:
|
||||
return []
|
||||
return [item.strip() for item in value.split(",") if item.strip()]
|
||||
|
||||
|
||||
def build_in_condition(field_sql: str, values: list[str], param_prefix: str, params: dict) -> str:
|
||||
placeholders = []
|
||||
for index, value in enumerate(values):
|
||||
key = f"{param_prefix}_{index}"
|
||||
params[key] = value
|
||||
placeholders.append(f":{key}")
|
||||
return f"{field_sql} IN ({', '.join(placeholders)})"
|
||||
|
||||
|
||||
def build_search_condition(search: Optional[str], params: dict) -> Optional[str]:
|
||||
if not search:
|
||||
return None
|
||||
|
||||
normalized = search.strip()
|
||||
if not normalized:
|
||||
return None
|
||||
|
||||
search_terms = [normalized]
|
||||
for variant in get_country_search_variants(normalized):
|
||||
if variant.casefold() not in {term.casefold() for term in search_terms}:
|
||||
search_terms.append(variant)
|
||||
|
||||
conditions = []
|
||||
for index, term in enumerate(search_terms):
|
||||
params[f"search_{index}"] = f"%{term}%"
|
||||
conditions.extend(f"{field} ILIKE :search_{index}" for field in SEARCHABLE_SQL)
|
||||
|
||||
params["search_exact"] = normalized
|
||||
params["search_prefix"] = f"{normalized}%"
|
||||
|
||||
canonical_variants = get_country_search_variants(normalized)
|
||||
canonical = canonical_variants[0] if canonical_variants else None
|
||||
params["country_search_exact"] = canonical or normalized
|
||||
params["country_search_prefix"] = f"{(canonical or normalized)}%"
|
||||
|
||||
return "(" + " OR ".join(conditions) + ")"
|
||||
|
||||
|
||||
def build_search_rank_sql(search: Optional[str]) -> str:
|
||||
if not search or not search.strip():
|
||||
return "0"
|
||||
|
||||
return """
|
||||
CASE
|
||||
WHEN name ILIKE :search_exact THEN 700
|
||||
WHEN name ILIKE :search_prefix THEN 600
|
||||
WHEN title ILIKE :search_exact THEN 500
|
||||
WHEN title ILIKE :search_prefix THEN 400
|
||||
WHEN metadata->>'country' ILIKE :country_search_exact THEN 380
|
||||
WHEN metadata->>'country' ILIKE :country_search_prefix THEN 340
|
||||
WHEN source_id ILIKE :search_exact THEN 350
|
||||
WHEN source ILIKE :search_exact THEN 300
|
||||
WHEN data_type ILIKE :search_exact THEN 250
|
||||
WHEN description ILIKE :search_0 THEN 150
|
||||
WHEN metadata::text ILIKE :search_0 THEN 100
|
||||
WHEN title ILIKE :search_0 THEN 80
|
||||
WHEN name ILIKE :search_0 THEN 60
|
||||
WHEN source ILIKE :search_0 THEN 40
|
||||
WHEN data_type ILIKE :search_0 THEN 30
|
||||
WHEN source_id ILIKE :search_0 THEN 20
|
||||
ELSE 0
|
||||
END
|
||||
"""
|
||||
|
||||
|
||||
def serialize_collected_row(row, source_name_map: dict[str, str] | None = None) -> dict:
|
||||
metadata = row[7]
|
||||
source = row[1]
|
||||
return {
|
||||
"id": row[0],
|
||||
"source": source,
|
||||
"source_name": source_name_map.get(source, source) if source_name_map else source,
|
||||
"source_id": row[2],
|
||||
"data_type": row[3],
|
||||
"name": row[4],
|
||||
"title": row[5],
|
||||
"description": row[6],
|
||||
"country": get_metadata_field(metadata, "country"),
|
||||
"city": get_metadata_field(metadata, "city"),
|
||||
"latitude": get_metadata_field(metadata, "latitude"),
|
||||
"longitude": get_metadata_field(metadata, "longitude"),
|
||||
"value": get_metadata_field(metadata, "value"),
|
||||
"unit": get_metadata_field(metadata, "unit"),
|
||||
"metadata": metadata,
|
||||
"cores": get_metadata_field(metadata, "cores"),
|
||||
"rmax": get_metadata_field(metadata, "rmax"),
|
||||
"rpeak": get_metadata_field(metadata, "rpeak"),
|
||||
"power": get_metadata_field(metadata, "power"),
|
||||
"collected_at": to_iso8601_utc(row[8]),
|
||||
"reference_date": to_iso8601_utc(row[9]),
|
||||
"is_valid": row[10],
|
||||
}
|
||||
|
||||
|
||||
async def get_source_name_map(db: AsyncSession) -> dict[str, str]:
|
||||
result = await db.execute(select(DataSource.source, DataSource.name))
|
||||
return {row[0]: row[1] for row in result.fetchall()}
|
||||
|
||||
|
||||
@router.get("")
|
||||
async def list_collected_data(
|
||||
mode: str = Query("current", description="查询模式: current/history"),
|
||||
source: Optional[str] = Query(None, description="数据源过滤"),
|
||||
data_type: Optional[str] = Query(None, description="数据类型过滤"),
|
||||
country: Optional[str] = Query(None, description="国家过滤"),
|
||||
@@ -27,25 +149,30 @@ async def list_collected_data(
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""查询采集的数据列表"""
|
||||
normalized_country = normalize_country(country) if country else None
|
||||
source_values = parse_multi_values(source)
|
||||
data_type_values = parse_multi_values(data_type)
|
||||
|
||||
# Build WHERE clause
|
||||
conditions = []
|
||||
params = {}
|
||||
|
||||
if source:
|
||||
conditions.append("source = :source")
|
||||
params["source"] = source
|
||||
if data_type:
|
||||
conditions.append("data_type = :data_type")
|
||||
params["data_type"] = data_type
|
||||
if country:
|
||||
conditions.append("country = :country")
|
||||
params["country"] = country
|
||||
if search:
|
||||
conditions.append("(name ILIKE :search OR title ILIKE :search)")
|
||||
params["search"] = f"%{search}%"
|
||||
if mode != "history":
|
||||
conditions.append("COALESCE(is_current, TRUE) = TRUE")
|
||||
|
||||
if source_values:
|
||||
conditions.append(build_in_condition("source", source_values, "source", params))
|
||||
if data_type_values:
|
||||
conditions.append(build_in_condition("data_type", data_type_values, "data_type", params))
|
||||
if normalized_country:
|
||||
conditions.append(f"{COUNTRY_SQL} = :country")
|
||||
params["country"] = normalized_country
|
||||
search_condition = build_search_condition(search, params)
|
||||
if search_condition:
|
||||
conditions.append(search_condition)
|
||||
|
||||
where_sql = " AND ".join(conditions) if conditions else "1=1"
|
||||
search_rank_sql = build_search_rank_sql(search)
|
||||
|
||||
# Calculate offset
|
||||
offset = (page - 1) * page_size
|
||||
@@ -58,11 +185,11 @@ async def list_collected_data(
|
||||
# Query data
|
||||
query = text(f"""
|
||||
SELECT id, source, source_id, data_type, name, title, description,
|
||||
country, city, latitude, longitude, value, unit,
|
||||
metadata, collected_at, reference_date, is_valid
|
||||
metadata, collected_at, reference_date, is_valid,
|
||||
{search_rank_sql} AS search_rank
|
||||
FROM collected_data
|
||||
WHERE {where_sql}
|
||||
ORDER BY collected_at DESC
|
||||
ORDER BY search_rank DESC, collected_at DESC
|
||||
LIMIT :limit OFFSET :offset
|
||||
""")
|
||||
params["limit"] = page_size
|
||||
@@ -70,30 +197,11 @@ async def list_collected_data(
|
||||
|
||||
result = await db.execute(query, params)
|
||||
rows = result.fetchall()
|
||||
source_name_map = await get_source_name_map(db)
|
||||
|
||||
data = []
|
||||
for row in rows:
|
||||
data.append(
|
||||
{
|
||||
"id": row[0],
|
||||
"source": row[1],
|
||||
"source_id": row[2],
|
||||
"data_type": row[3],
|
||||
"name": row[4],
|
||||
"title": row[5],
|
||||
"description": row[6],
|
||||
"country": row[7],
|
||||
"city": row[8],
|
||||
"latitude": row[9],
|
||||
"longitude": row[10],
|
||||
"value": row[11],
|
||||
"unit": row[12],
|
||||
"metadata": row[13],
|
||||
"collected_at": row[14].isoformat() if row[14] else None,
|
||||
"reference_date": row[15].isoformat() if row[15] else None,
|
||||
"is_valid": row[16],
|
||||
}
|
||||
)
|
||||
data.append(serialize_collected_row(row[:11], source_name_map))
|
||||
|
||||
return {
|
||||
"total": total,
|
||||
@@ -105,21 +213,39 @@ async def list_collected_data(
|
||||
|
||||
@router.get("/summary")
|
||||
async def get_data_summary(
|
||||
mode: str = Query("current", description="查询模式: current/history"),
|
||||
source: Optional[str] = Query(None, description="数据源过滤"),
|
||||
data_type: Optional[str] = Query(None, description="数据类型过滤"),
|
||||
country: Optional[str] = Query(None, description="国家过滤"),
|
||||
search: Optional[str] = Query(None, description="搜索名称"),
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""获取数据汇总统计"""
|
||||
where_sql, params = build_where_clause(source, data_type, country, search)
|
||||
if mode != "history":
|
||||
where_sql = f"({where_sql}) AND COALESCE(is_current, TRUE) = TRUE"
|
||||
|
||||
overall_where_sql = "COALESCE(is_current, TRUE) = TRUE" if mode != "history" else "1=1"
|
||||
|
||||
overall_total_result = await db.execute(
|
||||
text(f"SELECT COUNT(*) FROM collected_data WHERE {overall_where_sql}")
|
||||
)
|
||||
overall_total = overall_total_result.scalar() or 0
|
||||
|
||||
# By source and data_type
|
||||
result = await db.execute(
|
||||
text("""
|
||||
text(f"""
|
||||
SELECT source, data_type, COUNT(*) as count
|
||||
FROM collected_data
|
||||
WHERE {where_sql}
|
||||
GROUP BY source, data_type
|
||||
ORDER BY source, data_type
|
||||
""")
|
||||
"""),
|
||||
params,
|
||||
)
|
||||
rows = result.fetchall()
|
||||
source_name_map = await get_source_name_map(db)
|
||||
|
||||
by_source = {}
|
||||
total = 0
|
||||
@@ -128,31 +254,62 @@ async def get_data_summary(
|
||||
data_type = row[1]
|
||||
count = row[2]
|
||||
|
||||
if source not in by_source:
|
||||
by_source[source] = {}
|
||||
by_source[source][data_type] = count
|
||||
source_key = source_name_map.get(source, source)
|
||||
if source_key not in by_source:
|
||||
by_source[source_key] = {}
|
||||
by_source[source_key][data_type] = count
|
||||
total += count
|
||||
|
||||
# Total by source
|
||||
source_totals = await db.execute(
|
||||
text("""
|
||||
text(f"""
|
||||
SELECT source, COUNT(*) as count
|
||||
FROM collected_data
|
||||
WHERE {where_sql}
|
||||
GROUP BY source
|
||||
ORDER BY count DESC
|
||||
""")
|
||||
"""),
|
||||
params,
|
||||
)
|
||||
source_rows = source_totals.fetchall()
|
||||
|
||||
type_totals = await db.execute(
|
||||
text(f"""
|
||||
SELECT data_type, COUNT(*) as count
|
||||
FROM collected_data
|
||||
WHERE {where_sql}
|
||||
GROUP BY data_type
|
||||
ORDER BY count DESC, data_type
|
||||
"""),
|
||||
params,
|
||||
)
|
||||
type_rows = type_totals.fetchall()
|
||||
|
||||
return {
|
||||
"total_records": total,
|
||||
"overall_total_records": overall_total,
|
||||
"by_source": by_source,
|
||||
"source_totals": [{"source": row[0], "count": row[1]} for row in source_rows],
|
||||
"source_totals": [
|
||||
{
|
||||
"source": row[0],
|
||||
"source_name": source_name_map.get(row[0], row[0]),
|
||||
"count": row[1],
|
||||
}
|
||||
for row in source_rows
|
||||
],
|
||||
"type_totals": [
|
||||
{
|
||||
"data_type": row[0],
|
||||
"count": row[1],
|
||||
}
|
||||
for row in type_rows
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
@router.get("/sources")
|
||||
async def get_data_sources(
|
||||
mode: str = Query("current", description="查询模式: current/history"),
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
@@ -160,18 +317,25 @@ async def get_data_sources(
|
||||
|
||||
result = await db.execute(
|
||||
text("""
|
||||
SELECT DISTINCT source FROM collected_data ORDER BY source
|
||||
SELECT DISTINCT source FROM collected_data
|
||||
""" + ("WHERE COALESCE(is_current, TRUE) = TRUE " if mode != "history" else "") + """
|
||||
ORDER BY source
|
||||
""")
|
||||
)
|
||||
rows = result.fetchall()
|
||||
source_name_map = await get_source_name_map(db)
|
||||
|
||||
return {
|
||||
"sources": [row[0] for row in rows],
|
||||
"sources": [
|
||||
{"source": row[0], "source_name": source_name_map.get(row[0], row[0])}
|
||||
for row in rows
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
@router.get("/types")
|
||||
async def get_data_types(
|
||||
mode: str = Query("current", description="查询模式: current/history"),
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
@@ -179,7 +343,9 @@ async def get_data_types(
|
||||
|
||||
result = await db.execute(
|
||||
text("""
|
||||
SELECT DISTINCT data_type FROM collected_data ORDER BY data_type
|
||||
SELECT DISTINCT data_type FROM collected_data
|
||||
""" + ("WHERE COALESCE(is_current, TRUE) = TRUE " if mode != "history" else "") + """
|
||||
ORDER BY data_type
|
||||
""")
|
||||
)
|
||||
rows = result.fetchall()
|
||||
@@ -196,17 +362,8 @@ async def get_countries(
|
||||
):
|
||||
"""获取所有国家列表"""
|
||||
|
||||
result = await db.execute(
|
||||
text("""
|
||||
SELECT DISTINCT country FROM collected_data
|
||||
WHERE country IS NOT NULL AND country != ''
|
||||
ORDER BY country
|
||||
""")
|
||||
)
|
||||
rows = result.fetchall()
|
||||
|
||||
return {
|
||||
"countries": [row[0] for row in rows],
|
||||
"countries": COUNTRY_OPTIONS,
|
||||
}
|
||||
|
||||
|
||||
@@ -221,7 +378,6 @@ async def get_collected_data(
|
||||
result = await db.execute(
|
||||
text("""
|
||||
SELECT id, source, source_id, data_type, name, title, description,
|
||||
country, city, latitude, longitude, value, unit,
|
||||
metadata, collected_at, reference_date, is_valid
|
||||
FROM collected_data
|
||||
WHERE id = :id
|
||||
@@ -236,25 +392,8 @@ async def get_collected_data(
|
||||
detail="数据不存在",
|
||||
)
|
||||
|
||||
return {
|
||||
"id": row[0],
|
||||
"source": row[1],
|
||||
"source_id": row[2],
|
||||
"data_type": row[3],
|
||||
"name": row[4],
|
||||
"title": row[5],
|
||||
"description": row[6],
|
||||
"country": row[7],
|
||||
"city": row[8],
|
||||
"latitude": row[9],
|
||||
"longitude": row[10],
|
||||
"value": row[11],
|
||||
"unit": row[12],
|
||||
"metadata": row[13],
|
||||
"collected_at": row[14].isoformat() if row[14] else None,
|
||||
"reference_date": row[15].isoformat() if row[15] else None,
|
||||
"is_valid": row[16],
|
||||
}
|
||||
source_name_map = await get_source_name_map(db)
|
||||
return serialize_collected_row(row, source_name_map)
|
||||
|
||||
|
||||
def build_where_clause(
|
||||
@@ -263,19 +402,21 @@ def build_where_clause(
|
||||
"""Build WHERE clause and params for queries"""
|
||||
conditions = []
|
||||
params = {}
|
||||
source_values = parse_multi_values(source)
|
||||
data_type_values = parse_multi_values(data_type)
|
||||
|
||||
if source:
|
||||
conditions.append("source = :source")
|
||||
params["source"] = source
|
||||
if data_type:
|
||||
conditions.append("data_type = :data_type")
|
||||
params["data_type"] = data_type
|
||||
if country:
|
||||
conditions.append("country = :country")
|
||||
params["country"] = country
|
||||
if search:
|
||||
conditions.append("(name ILIKE :search OR title ILIKE :search)")
|
||||
params["search"] = f"%{search}%"
|
||||
if source_values:
|
||||
conditions.append(build_in_condition("source", source_values, "source", params))
|
||||
if data_type_values:
|
||||
conditions.append(build_in_condition("data_type", data_type_values, "data_type", params))
|
||||
normalized_country = normalize_country(country) if country else None
|
||||
|
||||
if normalized_country:
|
||||
conditions.append(f"{COUNTRY_SQL} = :country")
|
||||
params["country"] = normalized_country
|
||||
search_condition = build_search_condition(search, params)
|
||||
if search_condition:
|
||||
conditions.append(search_condition)
|
||||
|
||||
where_sql = " AND ".join(conditions) if conditions else "1=1"
|
||||
return where_sql, params
|
||||
@@ -283,6 +424,7 @@ def build_where_clause(
|
||||
|
||||
@router.get("/export/json")
|
||||
async def export_json(
|
||||
mode: str = Query("current", description="查询模式: current/history"),
|
||||
source: Optional[str] = Query(None, description="数据源过滤"),
|
||||
data_type: Optional[str] = Query(None, description="数据类型过滤"),
|
||||
country: Optional[str] = Query(None, description="国家过滤"),
|
||||
@@ -294,11 +436,12 @@ async def export_json(
|
||||
"""导出数据为 JSON 格式"""
|
||||
|
||||
where_sql, params = build_where_clause(source, data_type, country, search)
|
||||
if mode != "history":
|
||||
where_sql = f"({where_sql}) AND COALESCE(is_current, TRUE) = TRUE"
|
||||
params["limit"] = limit
|
||||
|
||||
query = text(f"""
|
||||
SELECT id, source, source_id, data_type, name, title, description,
|
||||
country, city, latitude, longitude, value, unit,
|
||||
metadata, collected_at, reference_date, is_valid
|
||||
FROM collected_data
|
||||
WHERE {where_sql}
|
||||
@@ -311,27 +454,7 @@ async def export_json(
|
||||
|
||||
data = []
|
||||
for row in rows:
|
||||
data.append(
|
||||
{
|
||||
"id": row[0],
|
||||
"source": row[1],
|
||||
"source_id": row[2],
|
||||
"data_type": row[3],
|
||||
"name": row[4],
|
||||
"title": row[5],
|
||||
"description": row[6],
|
||||
"country": row[7],
|
||||
"city": row[8],
|
||||
"latitude": row[9],
|
||||
"longitude": row[10],
|
||||
"value": row[11],
|
||||
"unit": row[12],
|
||||
"metadata": row[13],
|
||||
"collected_at": row[14].isoformat() if row[14] else None,
|
||||
"reference_date": row[15].isoformat() if row[15] else None,
|
||||
"is_valid": row[16],
|
||||
}
|
||||
)
|
||||
data.append(serialize_collected_row(row))
|
||||
|
||||
json_str = json.dumps({"data": data, "total": len(data)}, ensure_ascii=False, indent=2)
|
||||
|
||||
@@ -346,6 +469,7 @@ async def export_json(
|
||||
|
||||
@router.get("/export/csv")
|
||||
async def export_csv(
|
||||
mode: str = Query("current", description="查询模式: current/history"),
|
||||
source: Optional[str] = Query(None, description="数据源过滤"),
|
||||
data_type: Optional[str] = Query(None, description="数据类型过滤"),
|
||||
country: Optional[str] = Query(None, description="国家过滤"),
|
||||
@@ -357,11 +481,12 @@ async def export_csv(
|
||||
"""导出数据为 CSV 格式"""
|
||||
|
||||
where_sql, params = build_where_clause(source, data_type, country, search)
|
||||
if mode != "history":
|
||||
where_sql = f"({where_sql}) AND COALESCE(is_current, TRUE) = TRUE"
|
||||
params["limit"] = limit
|
||||
|
||||
query = text(f"""
|
||||
SELECT id, source, source_id, data_type, name, title, description,
|
||||
country, city, latitude, longitude, value, unit,
|
||||
metadata, collected_at, reference_date, is_valid
|
||||
FROM collected_data
|
||||
WHERE {where_sql}
|
||||
@@ -409,16 +534,16 @@ async def export_csv(
|
||||
row[4],
|
||||
row[5],
|
||||
row[6],
|
||||
row[7],
|
||||
row[8],
|
||||
row[9],
|
||||
get_metadata_field(row[7], "country"),
|
||||
get_metadata_field(row[7], "city"),
|
||||
get_metadata_field(row[7], "latitude"),
|
||||
get_metadata_field(row[7], "longitude"),
|
||||
get_metadata_field(row[7], "value"),
|
||||
get_metadata_field(row[7], "unit"),
|
||||
json.dumps(row[7]) if row[7] else "",
|
||||
to_iso8601_utc(row[8]) or "",
|
||||
to_iso8601_utc(row[9]) or "",
|
||||
row[10],
|
||||
row[11],
|
||||
row[12],
|
||||
json.dumps(row[13]) if row[13] else "",
|
||||
row[14].isoformat() if row[14] else "",
|
||||
row[15].isoformat() if row[15] else "",
|
||||
row[16],
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"""Dashboard API with caching and optimizations"""
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from fastapi import APIRouter, Depends
|
||||
from sqlalchemy import select, func, text
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
@@ -13,6 +13,7 @@ from app.models.alert import Alert, AlertSeverity
|
||||
from app.models.task import CollectionTask
|
||||
from app.core.security import get_current_user
|
||||
from app.core.cache import cache
|
||||
from app.core.time import to_iso8601_utc
|
||||
|
||||
|
||||
# Built-in collectors info (mirrored from datasources.py)
|
||||
@@ -111,7 +112,7 @@ async def get_stats(
|
||||
if cached_result:
|
||||
return cached_result
|
||||
|
||||
today_start = datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
today_start = datetime.now(UTC).replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
|
||||
# Count built-in collectors
|
||||
built_in_count = len(COLLECTOR_INFO)
|
||||
@@ -175,7 +176,7 @@ async def get_stats(
|
||||
"active_datasources": active_datasources,
|
||||
"tasks_today": tasks_today,
|
||||
"success_rate": round(success_rate, 1),
|
||||
"last_updated": datetime.utcnow().isoformat(),
|
||||
"last_updated": to_iso8601_utc(datetime.now(UTC)),
|
||||
"alerts": {
|
||||
"critical": critical_alerts,
|
||||
"warning": warning_alerts,
|
||||
@@ -230,10 +231,10 @@ async def get_summary(
|
||||
summary[module] = {
|
||||
"datasources": data["datasources"],
|
||||
"total_records": 0, # Built-in don't track this in dashboard stats
|
||||
"last_updated": datetime.utcnow().isoformat(),
|
||||
"last_updated": to_iso8601_utc(datetime.now(UTC)),
|
||||
}
|
||||
|
||||
response = {"modules": summary, "last_updated": datetime.utcnow().isoformat()}
|
||||
response = {"modules": summary, "last_updated": to_iso8601_utc(datetime.now(UTC))}
|
||||
|
||||
cache.set(cache_key, response, expire_seconds=300)
|
||||
|
||||
|
||||
@@ -14,6 +14,7 @@ from app.models.user import User
|
||||
from app.models.datasource_config import DataSourceConfig
|
||||
from app.core.security import get_current_user
|
||||
from app.core.cache import cache
|
||||
from app.core.time import to_iso8601_utc
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
@@ -123,8 +124,8 @@ async def list_configs(
|
||||
"headers": c.headers,
|
||||
"config": c.config,
|
||||
"is_active": c.is_active,
|
||||
"created_at": c.created_at.isoformat() if c.created_at else None,
|
||||
"updated_at": c.updated_at.isoformat() if c.updated_at else None,
|
||||
"created_at": to_iso8601_utc(c.created_at),
|
||||
"updated_at": to_iso8601_utc(c.updated_at),
|
||||
}
|
||||
for c in configs
|
||||
],
|
||||
@@ -155,8 +156,8 @@ async def get_config(
|
||||
"headers": config.headers,
|
||||
"config": config.config,
|
||||
"is_active": config.is_active,
|
||||
"created_at": config.created_at.isoformat() if config.created_at else None,
|
||||
"updated_at": config.updated_at.isoformat() if config.updated_at else None,
|
||||
"created_at": to_iso8601_utc(config.created_at),
|
||||
"updated_at": to_iso8601_utc(config.updated_at),
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -1,141 +1,77 @@
|
||||
from typing import List, Optional
|
||||
from datetime import datetime
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from sqlalchemy import select, func
|
||||
import asyncio
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from sqlalchemy import func, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.core.time import to_iso8601_utc
|
||||
from app.core.security import get_current_user
|
||||
from app.core.data_sources import get_data_sources_config
|
||||
from app.db.session import get_db
|
||||
from app.models.user import User
|
||||
from app.models.collected_data import CollectedData
|
||||
from app.models.datasource import DataSource
|
||||
from app.models.task import CollectionTask
|
||||
from app.models.collected_data import CollectedData
|
||||
from app.core.security import get_current_user
|
||||
from app.services.collectors.registry import collector_registry
|
||||
from app.models.user import User
|
||||
from app.services.scheduler import get_latest_task_id_for_datasource, run_collector_now, sync_datasource_job
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
COLLECTOR_INFO = {
|
||||
"top500": {
|
||||
"id": 1,
|
||||
"name": "TOP500 Supercomputers",
|
||||
"module": "L1",
|
||||
"priority": "P0",
|
||||
"frequency_hours": 4,
|
||||
},
|
||||
"epoch_ai_gpu": {
|
||||
"id": 2,
|
||||
"name": "Epoch AI GPU Clusters",
|
||||
"module": "L1",
|
||||
"priority": "P0",
|
||||
"frequency_hours": 6,
|
||||
},
|
||||
"huggingface_models": {
|
||||
"id": 3,
|
||||
"name": "HuggingFace Models",
|
||||
"module": "L2",
|
||||
"priority": "P1",
|
||||
"frequency_hours": 12,
|
||||
},
|
||||
"huggingface_datasets": {
|
||||
"id": 4,
|
||||
"name": "HuggingFace Datasets",
|
||||
"module": "L2",
|
||||
"priority": "P1",
|
||||
"frequency_hours": 12,
|
||||
},
|
||||
"huggingface_spaces": {
|
||||
"id": 5,
|
||||
"name": "HuggingFace Spaces",
|
||||
"module": "L2",
|
||||
"priority": "P2",
|
||||
"frequency_hours": 24,
|
||||
},
|
||||
"peeringdb_ixp": {
|
||||
"id": 6,
|
||||
"name": "PeeringDB IXP",
|
||||
"module": "L2",
|
||||
"priority": "P1",
|
||||
"frequency_hours": 24,
|
||||
},
|
||||
"peeringdb_network": {
|
||||
"id": 7,
|
||||
"name": "PeeringDB Networks",
|
||||
"module": "L2",
|
||||
"priority": "P2",
|
||||
"frequency_hours": 48,
|
||||
},
|
||||
"peeringdb_facility": {
|
||||
"id": 8,
|
||||
"name": "PeeringDB Facilities",
|
||||
"module": "L2",
|
||||
"priority": "P2",
|
||||
"frequency_hours": 48,
|
||||
},
|
||||
"telegeography_cables": {
|
||||
"id": 9,
|
||||
"name": "Submarine Cables",
|
||||
"module": "L2",
|
||||
"priority": "P1",
|
||||
"frequency_hours": 168,
|
||||
},
|
||||
"telegeography_landing": {
|
||||
"id": 10,
|
||||
"name": "Cable Landing Points",
|
||||
"module": "L2",
|
||||
"priority": "P2",
|
||||
"frequency_hours": 168,
|
||||
},
|
||||
"telegeography_systems": {
|
||||
"id": 11,
|
||||
"name": "Cable Systems",
|
||||
"module": "L2",
|
||||
"priority": "P2",
|
||||
"frequency_hours": 168,
|
||||
},
|
||||
"arcgis_cables": {
|
||||
"id": 15,
|
||||
"name": "ArcGIS Submarine Cables",
|
||||
"module": "L2",
|
||||
"priority": "P1",
|
||||
"frequency_hours": 168,
|
||||
},
|
||||
"arcgis_landing_points": {
|
||||
"id": 16,
|
||||
"name": "ArcGIS Landing Points",
|
||||
"module": "L2",
|
||||
"priority": "P1",
|
||||
"frequency_hours": 168,
|
||||
},
|
||||
"arcgis_cable_landing_relation": {
|
||||
"id": 17,
|
||||
"name": "ArcGIS Cable-Landing Relations",
|
||||
"module": "L2",
|
||||
"priority": "P1",
|
||||
"frequency_hours": 168,
|
||||
},
|
||||
"fao_landing_points": {
|
||||
"id": 18,
|
||||
"name": "FAO Landing Points",
|
||||
"module": "L2",
|
||||
"priority": "P1",
|
||||
"frequency_hours": 168,
|
||||
},
|
||||
}
|
||||
|
||||
ID_TO_COLLECTOR = {info["id"]: name for name, info in COLLECTOR_INFO.items()}
|
||||
COLLECTOR_TO_ID = {name: info["id"] for name, info in COLLECTOR_INFO.items()}
|
||||
def format_frequency_label(minutes: int) -> str:
|
||||
if minutes % 1440 == 0:
|
||||
return f"{minutes // 1440}d"
|
||||
if minutes % 60 == 0:
|
||||
return f"{minutes // 60}h"
|
||||
return f"{minutes}m"
|
||||
|
||||
|
||||
def get_collector_name(source_id: str) -> Optional[str]:
|
||||
def is_due_for_collection(datasource: DataSource, now: datetime) -> bool:
|
||||
if datasource.last_run_at is None:
|
||||
return True
|
||||
return datasource.last_run_at + timedelta(minutes=datasource.frequency_minutes) <= now
|
||||
|
||||
|
||||
async def get_datasource_record(db: AsyncSession, source_id: str) -> Optional[DataSource]:
|
||||
datasource = None
|
||||
try:
|
||||
numeric_id = int(source_id)
|
||||
if numeric_id in ID_TO_COLLECTOR:
|
||||
return ID_TO_COLLECTOR[numeric_id]
|
||||
datasource = await db.get(DataSource, int(source_id))
|
||||
except ValueError:
|
||||
pass
|
||||
if source_id in COLLECTOR_INFO:
|
||||
return source_id
|
||||
return None
|
||||
|
||||
if datasource is not None:
|
||||
return datasource
|
||||
|
||||
result = await db.execute(
|
||||
select(DataSource).where(
|
||||
(DataSource.source == source_id) | (DataSource.collector_class == source_id)
|
||||
)
|
||||
)
|
||||
return result.scalar_one_or_none()
|
||||
|
||||
|
||||
async def get_last_completed_task(db: AsyncSession, datasource_id: int) -> Optional[CollectionTask]:
|
||||
result = await db.execute(
|
||||
select(CollectionTask)
|
||||
.where(CollectionTask.datasource_id == datasource_id)
|
||||
.where(CollectionTask.completed_at.isnot(None))
|
||||
.where(CollectionTask.status.in_(("success", "failed", "cancelled")))
|
||||
.order_by(CollectionTask.completed_at.desc())
|
||||
.limit(1)
|
||||
)
|
||||
return result.scalar_one_or_none()
|
||||
|
||||
|
||||
async def get_running_task(db: AsyncSession, datasource_id: int) -> Optional[CollectionTask]:
|
||||
result = await db.execute(
|
||||
select(CollectionTask)
|
||||
.where(CollectionTask.datasource_id == datasource_id)
|
||||
.where(CollectionTask.status == "running")
|
||||
.order_by(CollectionTask.started_at.desc())
|
||||
.limit(1)
|
||||
)
|
||||
return result.scalar_one_or_none()
|
||||
|
||||
|
||||
@router.get("")
|
||||
@@ -146,80 +82,156 @@ async def list_datasources(
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
query = select(DataSource)
|
||||
|
||||
filters = []
|
||||
query = select(DataSource).order_by(DataSource.module, DataSource.id)
|
||||
if module:
|
||||
filters.append(DataSource.module == module)
|
||||
query = query.where(DataSource.module == module)
|
||||
if is_active is not None:
|
||||
filters.append(DataSource.is_active == is_active)
|
||||
query = query.where(DataSource.is_active == is_active)
|
||||
if priority:
|
||||
filters.append(DataSource.priority == priority)
|
||||
|
||||
if filters:
|
||||
query = query.where(*filters)
|
||||
query = query.where(DataSource.priority == priority)
|
||||
|
||||
result = await db.execute(query)
|
||||
datasources = result.scalars().all()
|
||||
|
||||
collector_list = []
|
||||
for name, info in COLLECTOR_INFO.items():
|
||||
is_active_status = collector_registry.is_active(name)
|
||||
|
||||
running_task_query = (
|
||||
select(CollectionTask)
|
||||
.where(CollectionTask.datasource_id == info["id"])
|
||||
.where(CollectionTask.status == "running")
|
||||
.order_by(CollectionTask.started_at.desc())
|
||||
.limit(1)
|
||||
config = get_data_sources_config()
|
||||
for datasource in datasources:
|
||||
running_task = await get_running_task(db, datasource.id)
|
||||
last_task = await get_last_completed_task(db, datasource.id)
|
||||
endpoint = await config.get_url(datasource.source, db)
|
||||
data_count_result = await db.execute(
|
||||
select(func.count(CollectedData.id)).where(CollectedData.source == datasource.source)
|
||||
)
|
||||
running_result = await db.execute(running_task_query)
|
||||
running_task = running_result.scalar_one_or_none()
|
||||
|
||||
last_run_query = (
|
||||
select(CollectionTask)
|
||||
.where(CollectionTask.datasource_id == info["id"])
|
||||
.where(CollectionTask.completed_at.isnot(None))
|
||||
.order_by(CollectionTask.completed_at.desc())
|
||||
.limit(1)
|
||||
)
|
||||
last_run_result = await db.execute(last_run_query)
|
||||
last_task = last_run_result.scalar_one_or_none()
|
||||
|
||||
data_count_query = select(func.count(CollectedData.id)).where(CollectedData.source == name)
|
||||
data_count_result = await db.execute(data_count_query)
|
||||
data_count = data_count_result.scalar() or 0
|
||||
|
||||
last_run = None
|
||||
if last_task and last_task.completed_at and data_count > 0:
|
||||
last_run = last_task.completed_at.strftime("%Y-%m-%d %H:%M")
|
||||
last_run_at = datasource.last_run_at or (last_task.completed_at if last_task else None)
|
||||
last_run = to_iso8601_utc(last_run_at)
|
||||
last_status = datasource.last_status or (last_task.status if last_task else None)
|
||||
|
||||
collector_list.append(
|
||||
{
|
||||
"id": info["id"],
|
||||
"name": info["name"],
|
||||
"module": info["module"],
|
||||
"priority": info["priority"],
|
||||
"frequency": f"{info['frequency_hours']}h",
|
||||
"is_active": is_active_status,
|
||||
"collector_class": name,
|
||||
"id": datasource.id,
|
||||
"name": datasource.name,
|
||||
"module": datasource.module,
|
||||
"priority": datasource.priority,
|
||||
"frequency": format_frequency_label(datasource.frequency_minutes),
|
||||
"frequency_minutes": datasource.frequency_minutes,
|
||||
"is_active": datasource.is_active,
|
||||
"collector_class": datasource.collector_class,
|
||||
"endpoint": endpoint,
|
||||
"last_run": last_run,
|
||||
"last_run_at": to_iso8601_utc(last_run_at),
|
||||
"last_status": last_status,
|
||||
"last_records_processed": last_task.records_processed if last_task else None,
|
||||
"data_count": data_count,
|
||||
"is_running": running_task is not None,
|
||||
"task_id": running_task.id if running_task else None,
|
||||
"progress": running_task.progress if running_task else None,
|
||||
"phase": running_task.phase if running_task else None,
|
||||
"records_processed": running_task.records_processed if running_task else None,
|
||||
"total_records": running_task.total_records if running_task else None,
|
||||
}
|
||||
)
|
||||
|
||||
if module:
|
||||
collector_list = [c for c in collector_list if c["module"] == module]
|
||||
if priority:
|
||||
collector_list = [c for c in collector_list if c["priority"] == priority]
|
||||
return {"total": len(collector_list), "data": collector_list}
|
||||
|
||||
|
||||
@router.post("/trigger-all")
|
||||
async def trigger_all_datasources(
|
||||
force: bool = Query(False),
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
result = await db.execute(
|
||||
select(DataSource)
|
||||
.where(DataSource.is_active == True)
|
||||
.order_by(DataSource.module, DataSource.id)
|
||||
)
|
||||
datasources = result.scalars().all()
|
||||
|
||||
if not datasources:
|
||||
return {
|
||||
"status": "noop",
|
||||
"message": "No active data sources to trigger",
|
||||
"triggered": [],
|
||||
"skipped": [],
|
||||
"failed": [],
|
||||
}
|
||||
|
||||
previous_task_ids: dict[int, Optional[int]] = {}
|
||||
triggered_sources: list[dict] = []
|
||||
skipped_sources: list[dict] = []
|
||||
failed_sources: list[dict] = []
|
||||
now = datetime.now(timezone.utc)
|
||||
|
||||
for datasource in datasources:
|
||||
running_task = await get_running_task(db, datasource.id)
|
||||
if running_task is not None:
|
||||
skipped_sources.append(
|
||||
{
|
||||
"id": datasource.id,
|
||||
"source": datasource.source,
|
||||
"name": datasource.name,
|
||||
"reason": "already_running",
|
||||
"task_id": running_task.id,
|
||||
}
|
||||
)
|
||||
continue
|
||||
|
||||
if not force and not is_due_for_collection(datasource, now):
|
||||
skipped_sources.append(
|
||||
{
|
||||
"id": datasource.id,
|
||||
"source": datasource.source,
|
||||
"name": datasource.name,
|
||||
"reason": "within_frequency_window",
|
||||
"last_run_at": to_iso8601_utc(datasource.last_run_at),
|
||||
"next_run_at": to_iso8601_utc(
|
||||
datasource.last_run_at + timedelta(minutes=datasource.frequency_minutes)
|
||||
),
|
||||
}
|
||||
)
|
||||
continue
|
||||
|
||||
previous_task_ids[datasource.id] = await get_latest_task_id_for_datasource(datasource.id)
|
||||
success = run_collector_now(datasource.source)
|
||||
if not success:
|
||||
failed_sources.append(
|
||||
{
|
||||
"id": datasource.id,
|
||||
"source": datasource.source,
|
||||
"name": datasource.name,
|
||||
"reason": "trigger_failed",
|
||||
}
|
||||
)
|
||||
continue
|
||||
|
||||
triggered_sources.append(
|
||||
{
|
||||
"id": datasource.id,
|
||||
"source": datasource.source,
|
||||
"name": datasource.name,
|
||||
"task_id": None,
|
||||
}
|
||||
)
|
||||
|
||||
for _ in range(20):
|
||||
await asyncio.sleep(0.1)
|
||||
pending = [item for item in triggered_sources if item["task_id"] is None]
|
||||
if not pending:
|
||||
break
|
||||
for item in pending:
|
||||
task_id = await get_latest_task_id_for_datasource(item["id"])
|
||||
if task_id is not None and task_id != previous_task_ids.get(item["id"]):
|
||||
item["task_id"] = task_id
|
||||
|
||||
return {
|
||||
"total": len(collector_list),
|
||||
"data": collector_list,
|
||||
"status": "triggered" if triggered_sources else "partial",
|
||||
"message": f"Triggered {len(triggered_sources)} data sources",
|
||||
"force": force,
|
||||
"triggered": triggered_sources,
|
||||
"skipped": skipped_sources,
|
||||
"failed": failed_sources,
|
||||
}
|
||||
|
||||
|
||||
@@ -229,19 +241,24 @@ async def get_datasource(
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
collector_name = get_collector_name(source_id)
|
||||
if not collector_name:
|
||||
datasource = await get_datasource_record(db, source_id)
|
||||
if not datasource:
|
||||
raise HTTPException(status_code=404, detail="Data source not found")
|
||||
|
||||
info = COLLECTOR_INFO[collector_name]
|
||||
config = get_data_sources_config()
|
||||
endpoint = await config.get_url(datasource.source, db)
|
||||
|
||||
return {
|
||||
"id": info["id"],
|
||||
"name": info["name"],
|
||||
"module": info["module"],
|
||||
"priority": info["priority"],
|
||||
"frequency": f"{info['frequency_hours']}h",
|
||||
"collector_class": collector_name,
|
||||
"is_active": collector_registry.is_active(collector_name),
|
||||
"id": datasource.id,
|
||||
"name": datasource.name,
|
||||
"module": datasource.module,
|
||||
"priority": datasource.priority,
|
||||
"frequency": format_frequency_label(datasource.frequency_minutes),
|
||||
"frequency_minutes": datasource.frequency_minutes,
|
||||
"collector_class": datasource.collector_class,
|
||||
"source": datasource.source,
|
||||
"endpoint": endpoint,
|
||||
"is_active": datasource.is_active,
|
||||
}
|
||||
|
||||
|
||||
@@ -249,24 +266,32 @@ async def get_datasource(
|
||||
async def enable_datasource(
|
||||
source_id: str,
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
collector_name = get_collector_name(source_id)
|
||||
if not collector_name:
|
||||
datasource = await get_datasource_record(db, source_id)
|
||||
if not datasource:
|
||||
raise HTTPException(status_code=404, detail="Data source not found")
|
||||
collector_registry.set_active(collector_name, True)
|
||||
return {"status": "enabled", "source_id": source_id}
|
||||
|
||||
datasource.is_active = True
|
||||
await db.commit()
|
||||
await sync_datasource_job(datasource.id)
|
||||
return {"status": "enabled", "source_id": datasource.id}
|
||||
|
||||
|
||||
@router.post("/{source_id}/disable")
|
||||
async def disable_datasource(
|
||||
source_id: str,
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
collector_name = get_collector_name(source_id)
|
||||
if not collector_name:
|
||||
datasource = await get_datasource_record(db, source_id)
|
||||
if not datasource:
|
||||
raise HTTPException(status_code=404, detail="Data source not found")
|
||||
collector_registry.set_active(collector_name, False)
|
||||
return {"status": "disabled", "source_id": source_id}
|
||||
|
||||
datasource.is_active = False
|
||||
await db.commit()
|
||||
await sync_datasource_job(datasource.id)
|
||||
return {"status": "disabled", "source_id": datasource.id}
|
||||
|
||||
|
||||
@router.get("/{source_id}/stats")
|
||||
@@ -275,26 +300,19 @@ async def get_datasource_stats(
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
collector_name = get_collector_name(source_id)
|
||||
if not collector_name:
|
||||
datasource = await get_datasource_record(db, source_id)
|
||||
if not datasource:
|
||||
raise HTTPException(status_code=404, detail="Data source not found")
|
||||
|
||||
info = COLLECTOR_INFO[collector_name]
|
||||
source_name = info["name"]
|
||||
|
||||
query = select(func.count(CollectedData.id)).where(CollectedData.source == collector_name)
|
||||
result = await db.execute(query)
|
||||
total = result.scalar() or 0
|
||||
|
||||
if total == 0:
|
||||
query = select(func.count(CollectedData.id)).where(CollectedData.source == source_name)
|
||||
result = await db.execute(query)
|
||||
result = await db.execute(
|
||||
select(func.count(CollectedData.id)).where(CollectedData.source == datasource.source)
|
||||
)
|
||||
total = result.scalar() or 0
|
||||
|
||||
return {
|
||||
"source_id": source_id,
|
||||
"collector_name": collector_name,
|
||||
"name": info["name"],
|
||||
"source_id": datasource.id,
|
||||
"collector_name": datasource.collector_class,
|
||||
"name": datasource.name,
|
||||
"total_records": total,
|
||||
}
|
||||
|
||||
@@ -303,30 +321,36 @@ async def get_datasource_stats(
|
||||
async def trigger_datasource(
|
||||
source_id: str,
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
collector_name = get_collector_name(source_id)
|
||||
if not collector_name:
|
||||
datasource = await get_datasource_record(db, source_id)
|
||||
if not datasource:
|
||||
raise HTTPException(status_code=404, detail="Data source not found")
|
||||
|
||||
from app.services.scheduler import run_collector_now
|
||||
|
||||
if not collector_registry.is_active(collector_name):
|
||||
if not datasource.is_active:
|
||||
raise HTTPException(status_code=400, detail="Data source is disabled")
|
||||
|
||||
success = run_collector_now(collector_name)
|
||||
previous_task_id = await get_latest_task_id_for_datasource(datasource.id)
|
||||
success = run_collector_now(datasource.source)
|
||||
if not success:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to trigger collector '{datasource.source}'")
|
||||
|
||||
task_id = None
|
||||
for _ in range(20):
|
||||
await asyncio.sleep(0.1)
|
||||
task_id = await get_latest_task_id_for_datasource(datasource.id)
|
||||
if task_id is not None and task_id != previous_task_id:
|
||||
break
|
||||
if task_id == previous_task_id:
|
||||
task_id = None
|
||||
|
||||
if success:
|
||||
return {
|
||||
"status": "triggered",
|
||||
"source_id": source_id,
|
||||
"collector_name": collector_name,
|
||||
"message": f"Collector '{collector_name}' has been triggered",
|
||||
"source_id": datasource.id,
|
||||
"task_id": task_id,
|
||||
"collector_name": datasource.source,
|
||||
"message": f"Collector '{datasource.source}' has been triggered",
|
||||
}
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=f"Failed to trigger collector '{collector_name}'",
|
||||
)
|
||||
|
||||
|
||||
@router.delete("/{source_id}/data")
|
||||
@@ -335,39 +359,25 @@ async def clear_datasource_data(
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
collector_name = get_collector_name(source_id)
|
||||
if not collector_name:
|
||||
datasource = await get_datasource_record(db, source_id)
|
||||
if not datasource:
|
||||
raise HTTPException(status_code=404, detail="Data source not found")
|
||||
|
||||
info = COLLECTOR_INFO[collector_name]
|
||||
source_name = info["name"]
|
||||
|
||||
query = select(func.count(CollectedData.id)).where(CollectedData.source == collector_name)
|
||||
result = await db.execute(query)
|
||||
result = await db.execute(
|
||||
select(func.count(CollectedData.id)).where(CollectedData.source == datasource.source)
|
||||
)
|
||||
count = result.scalar() or 0
|
||||
|
||||
if count == 0:
|
||||
query = select(func.count(CollectedData.id)).where(CollectedData.source == source_name)
|
||||
result = await db.execute(query)
|
||||
count = result.scalar() or 0
|
||||
delete_source = source_name
|
||||
else:
|
||||
delete_source = collector_name
|
||||
return {"status": "success", "message": "No data to clear", "deleted_count": 0}
|
||||
|
||||
if count == 0:
|
||||
return {
|
||||
"status": "success",
|
||||
"message": "No data to clear",
|
||||
"deleted_count": 0,
|
||||
}
|
||||
|
||||
delete_query = CollectedData.__table__.delete().where(CollectedData.source == delete_source)
|
||||
delete_query = CollectedData.__table__.delete().where(CollectedData.source == datasource.source)
|
||||
await db.execute(delete_query)
|
||||
await db.commit()
|
||||
|
||||
return {
|
||||
"status": "success",
|
||||
"message": f"Cleared {count} records for data source '{info['name']}'",
|
||||
"message": f"Cleared {count} records for data source '{datasource.name}'",
|
||||
"deleted_count": count,
|
||||
}
|
||||
|
||||
@@ -375,32 +385,29 @@ async def clear_datasource_data(
|
||||
@router.get("/{source_id}/task-status")
|
||||
async def get_task_status(
|
||||
source_id: str,
|
||||
task_id: Optional[int] = None,
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
collector_name = get_collector_name(source_id)
|
||||
if not collector_name:
|
||||
datasource = await get_datasource_record(db, source_id)
|
||||
if not datasource:
|
||||
raise HTTPException(status_code=404, detail="Data source not found")
|
||||
|
||||
info = COLLECTOR_INFO[collector_name]
|
||||
if task_id is not None:
|
||||
task = await db.get(CollectionTask, task_id)
|
||||
if not task or task.datasource_id != datasource.id:
|
||||
raise HTTPException(status_code=404, detail="Task not found")
|
||||
else:
|
||||
task = await get_running_task(db, datasource.id)
|
||||
|
||||
running_task_query = (
|
||||
select(CollectionTask)
|
||||
.where(CollectionTask.datasource_id == info["id"])
|
||||
.where(CollectionTask.status == "running")
|
||||
.order_by(CollectionTask.started_at.desc())
|
||||
.limit(1)
|
||||
)
|
||||
running_result = await db.execute(running_task_query)
|
||||
running_task = running_result.scalar_one_or_none()
|
||||
|
||||
if not running_task:
|
||||
return {"is_running": False, "task_id": None, "progress": None}
|
||||
if not task:
|
||||
return {"is_running": False, "task_id": None, "progress": None, "phase": None, "status": "idle"}
|
||||
|
||||
return {
|
||||
"is_running": True,
|
||||
"task_id": running_task.id,
|
||||
"progress": running_task.progress,
|
||||
"records_processed": running_task.records_processed,
|
||||
"total_records": running_task.total_records,
|
||||
"status": running_task.status,
|
||||
"is_running": task.status == "running",
|
||||
"task_id": task.id,
|
||||
"progress": task.progress,
|
||||
"phase": task.phase,
|
||||
"records_processed": task.records_processed,
|
||||
"total_records": task.total_records,
|
||||
"status": task.status,
|
||||
}
|
||||
|
||||
@@ -1,13 +1,22 @@
|
||||
from datetime import UTC, datetime
|
||||
from typing import Optional
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from pydantic import BaseModel, EmailStr
|
||||
|
||||
from app.models.user import User
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from pydantic import BaseModel, EmailStr, Field
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.core.security import get_current_user
|
||||
from app.core.time import to_iso8601_utc
|
||||
from app.db.session import get_db
|
||||
from app.models.datasource import DataSource
|
||||
from app.models.system_setting import SystemSetting
|
||||
from app.models.user import User
|
||||
from app.services.scheduler import sync_datasource_job
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
default_settings = {
|
||||
DEFAULT_SETTINGS = {
|
||||
"system": {
|
||||
"system_name": "智能星球",
|
||||
"refresh_interval": 60,
|
||||
@@ -29,17 +38,13 @@ default_settings = {
|
||||
},
|
||||
}
|
||||
|
||||
system_settings = default_settings["system"].copy()
|
||||
notification_settings = default_settings["notifications"].copy()
|
||||
security_settings = default_settings["security"].copy()
|
||||
|
||||
|
||||
class SystemSettingsUpdate(BaseModel):
|
||||
system_name: str = "智能星球"
|
||||
refresh_interval: int = 60
|
||||
refresh_interval: int = Field(default=60, ge=10, le=3600)
|
||||
auto_refresh: bool = True
|
||||
data_retention_days: int = 30
|
||||
max_concurrent_tasks: int = 5
|
||||
data_retention_days: int = Field(default=30, ge=1, le=3650)
|
||||
max_concurrent_tasks: int = Field(default=5, ge=1, le=50)
|
||||
|
||||
|
||||
class NotificationSettingsUpdate(BaseModel):
|
||||
@@ -51,60 +56,166 @@ class NotificationSettingsUpdate(BaseModel):
|
||||
|
||||
|
||||
class SecuritySettingsUpdate(BaseModel):
|
||||
session_timeout: int = 60
|
||||
max_login_attempts: int = 5
|
||||
password_policy: str = "medium"
|
||||
session_timeout: int = Field(default=60, ge=5, le=1440)
|
||||
max_login_attempts: int = Field(default=5, ge=1, le=20)
|
||||
password_policy: str = Field(default="medium")
|
||||
|
||||
|
||||
class CollectorSettingsUpdate(BaseModel):
|
||||
is_active: bool
|
||||
priority: str = Field(default="P1")
|
||||
frequency_minutes: int = Field(default=60, ge=1, le=10080)
|
||||
|
||||
|
||||
def merge_with_defaults(category: str, payload: Optional[dict]) -> dict:
|
||||
merged = DEFAULT_SETTINGS[category].copy()
|
||||
if payload:
|
||||
merged.update(payload)
|
||||
return merged
|
||||
|
||||
|
||||
async def get_setting_record(db: AsyncSession, category: str) -> Optional[SystemSetting]:
|
||||
result = await db.execute(select(SystemSetting).where(SystemSetting.category == category))
|
||||
return result.scalar_one_or_none()
|
||||
|
||||
|
||||
async def get_setting_payload(db: AsyncSession, category: str) -> dict:
|
||||
record = await get_setting_record(db, category)
|
||||
return merge_with_defaults(category, record.payload if record else None)
|
||||
|
||||
|
||||
async def save_setting_payload(db: AsyncSession, category: str, payload: dict) -> dict:
|
||||
record = await get_setting_record(db, category)
|
||||
if record is None:
|
||||
record = SystemSetting(category=category, payload=payload)
|
||||
db.add(record)
|
||||
else:
|
||||
record.payload = payload
|
||||
|
||||
await db.commit()
|
||||
await db.refresh(record)
|
||||
return merge_with_defaults(category, record.payload)
|
||||
|
||||
|
||||
def format_frequency_label(minutes: int) -> str:
|
||||
if minutes % 1440 == 0:
|
||||
return f"{minutes // 1440}d"
|
||||
if minutes % 60 == 0:
|
||||
return f"{minutes // 60}h"
|
||||
return f"{minutes}m"
|
||||
|
||||
|
||||
def serialize_collector(datasource: DataSource) -> dict:
|
||||
return {
|
||||
"id": datasource.id,
|
||||
"name": datasource.name,
|
||||
"source": datasource.source,
|
||||
"module": datasource.module,
|
||||
"priority": datasource.priority,
|
||||
"frequency_minutes": datasource.frequency_minutes,
|
||||
"frequency": format_frequency_label(datasource.frequency_minutes),
|
||||
"is_active": datasource.is_active,
|
||||
"last_run_at": to_iso8601_utc(datasource.last_run_at),
|
||||
"last_status": datasource.last_status,
|
||||
"next_run_at": to_iso8601_utc(datasource.next_run_at),
|
||||
}
|
||||
|
||||
|
||||
@router.get("/system")
|
||||
async def get_system_settings(current_user: User = Depends(get_current_user)):
|
||||
return {"system": system_settings}
|
||||
async def get_system_settings(
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
return {"system": await get_setting_payload(db, "system")}
|
||||
|
||||
|
||||
@router.put("/system")
|
||||
async def update_system_settings(
|
||||
settings: SystemSettingsUpdate,
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
global system_settings
|
||||
system_settings = settings.model_dump()
|
||||
return {"status": "updated", "system": system_settings}
|
||||
payload = await save_setting_payload(db, "system", settings.model_dump())
|
||||
return {"status": "updated", "system": payload}
|
||||
|
||||
|
||||
@router.get("/notifications")
|
||||
async def get_notification_settings(current_user: User = Depends(get_current_user)):
|
||||
return {"notifications": notification_settings}
|
||||
async def get_notification_settings(
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
return {"notifications": await get_setting_payload(db, "notifications")}
|
||||
|
||||
|
||||
@router.put("/notifications")
|
||||
async def update_notification_settings(
|
||||
settings: NotificationSettingsUpdate,
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
global notification_settings
|
||||
notification_settings = settings.model_dump()
|
||||
return {"status": "updated", "notifications": notification_settings}
|
||||
payload = await save_setting_payload(db, "notifications", settings.model_dump())
|
||||
return {"status": "updated", "notifications": payload}
|
||||
|
||||
|
||||
@router.get("/security")
|
||||
async def get_security_settings(current_user: User = Depends(get_current_user)):
|
||||
return {"security": security_settings}
|
||||
async def get_security_settings(
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
return {"security": await get_setting_payload(db, "security")}
|
||||
|
||||
|
||||
@router.put("/security")
|
||||
async def update_security_settings(
|
||||
settings: SecuritySettingsUpdate,
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
global security_settings
|
||||
security_settings = settings.model_dump()
|
||||
return {"status": "updated", "security": security_settings}
|
||||
payload = await save_setting_payload(db, "security", settings.model_dump())
|
||||
return {"status": "updated", "security": payload}
|
||||
|
||||
|
||||
@router.get("/collectors")
|
||||
async def get_collector_settings(
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
result = await db.execute(select(DataSource).order_by(DataSource.module, DataSource.id))
|
||||
datasources = result.scalars().all()
|
||||
return {"collectors": [serialize_collector(datasource) for datasource in datasources]}
|
||||
|
||||
|
||||
@router.put("/collectors/{datasource_id}")
|
||||
async def update_collector_settings(
|
||||
datasource_id: int,
|
||||
settings: CollectorSettingsUpdate,
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
datasource = await db.get(DataSource, datasource_id)
|
||||
if not datasource:
|
||||
raise HTTPException(status_code=404, detail="Data source not found")
|
||||
|
||||
datasource.is_active = settings.is_active
|
||||
datasource.priority = settings.priority
|
||||
datasource.frequency_minutes = settings.frequency_minutes
|
||||
await db.commit()
|
||||
await db.refresh(datasource)
|
||||
await sync_datasource_job(datasource.id)
|
||||
return {"status": "updated", "collector": serialize_collector(datasource)}
|
||||
|
||||
|
||||
@router.get("")
|
||||
async def get_all_settings(current_user: User = Depends(get_current_user)):
|
||||
async def get_all_settings(
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
result = await db.execute(select(DataSource).order_by(DataSource.module, DataSource.id))
|
||||
datasources = result.scalars().all()
|
||||
return {
|
||||
"system": system_settings,
|
||||
"notifications": notification_settings,
|
||||
"security": security_settings,
|
||||
"system": await get_setting_payload(db, "system"),
|
||||
"notifications": await get_setting_payload(db, "notifications"),
|
||||
"security": await get_setting_payload(db, "security"),
|
||||
"collectors": [serialize_collector(datasource) for datasource in datasources],
|
||||
"generated_at": to_iso8601_utc(datetime.now(UTC)),
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from datetime import datetime
|
||||
from datetime import UTC, datetime
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
@@ -8,6 +8,7 @@ from sqlalchemy import text
|
||||
from app.db.session import get_db
|
||||
from app.models.user import User
|
||||
from app.core.security import get_current_user
|
||||
from app.core.time import to_iso8601_utc
|
||||
from app.services.collectors.registry import collector_registry
|
||||
|
||||
|
||||
@@ -61,8 +62,8 @@ async def list_tasks(
|
||||
"datasource_id": t[1],
|
||||
"datasource_name": t[2],
|
||||
"status": t[3],
|
||||
"started_at": t[4].isoformat() if t[4] else None,
|
||||
"completed_at": t[5].isoformat() if t[5] else None,
|
||||
"started_at": to_iso8601_utc(t[4]),
|
||||
"completed_at": to_iso8601_utc(t[5]),
|
||||
"records_processed": t[6],
|
||||
"error_message": t[7],
|
||||
}
|
||||
@@ -100,8 +101,8 @@ async def get_task(
|
||||
"datasource_id": task[1],
|
||||
"datasource_name": task[2],
|
||||
"status": task[3],
|
||||
"started_at": task[4].isoformat() if task[4] else None,
|
||||
"completed_at": task[5].isoformat() if task[5] else None,
|
||||
"started_at": to_iso8601_utc(task[4]),
|
||||
"completed_at": to_iso8601_utc(task[5]),
|
||||
"records_processed": task[6],
|
||||
"error_message": task[7],
|
||||
}
|
||||
@@ -147,8 +148,8 @@ async def trigger_collection(
|
||||
"status": result.get("status", "unknown"),
|
||||
"records_processed": result.get("records_processed", 0),
|
||||
"error_message": result.get("error"),
|
||||
"started_at": datetime.utcnow(),
|
||||
"completed_at": datetime.utcnow(),
|
||||
"started_at": datetime.now(UTC),
|
||||
"completed_at": datetime.now(UTC),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@@ -1,17 +1,30 @@
|
||||
"""Visualization API - GeoJSON endpoints for 3D Earth display"""
|
||||
"""Visualization API - GeoJSON endpoints for 3D Earth display
|
||||
|
||||
from fastapi import APIRouter, HTTPException, Depends
|
||||
Unified API for all visualization data sources.
|
||||
Returns GeoJSON format compatible with Three.js, CesiumJS, and Unreal Cesium.
|
||||
"""
|
||||
|
||||
from datetime import UTC, datetime
|
||||
from fastapi import APIRouter, HTTPException, Depends, Query
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy import select, func
|
||||
from typing import List, Dict, Any, Optional
|
||||
|
||||
from app.core.collected_data_fields import get_record_field
|
||||
from app.core.satellite_tle import build_tle_lines_from_elements
|
||||
from app.core.time import to_iso8601_utc
|
||||
from app.db.session import get_db
|
||||
from app.models.bgp_anomaly import BGPAnomaly
|
||||
from app.models.collected_data import CollectedData
|
||||
from app.services.cable_graph import build_graph_from_data, CableGraph
|
||||
from app.services.collectors.bgp_common import RIPE_RIS_COLLECTOR_COORDS
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
# ============== Converter Functions ==============
|
||||
|
||||
|
||||
def convert_cable_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
|
||||
"""Convert cable records to GeoJSON FeatureCollection"""
|
||||
features = []
|
||||
@@ -66,6 +79,7 @@ def convert_cable_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
|
||||
"geometry": {"type": "MultiLineString", "coordinates": all_lines},
|
||||
"properties": {
|
||||
"id": record.id,
|
||||
"cable_id": record.name,
|
||||
"source_id": record.source_id,
|
||||
"Name": record.name,
|
||||
"name": record.name,
|
||||
@@ -74,9 +88,9 @@ def convert_cable_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
|
||||
"rfs": metadata.get("rfs"),
|
||||
"RFS": metadata.get("rfs"),
|
||||
"status": metadata.get("status", "active"),
|
||||
"length": record.value,
|
||||
"length_km": record.value,
|
||||
"SHAPE__Length": record.value,
|
||||
"length": get_record_field(record, "value"),
|
||||
"length_km": get_record_field(record, "value"),
|
||||
"SHAPE__Length": get_record_field(record, "value"),
|
||||
"url": metadata.get("url"),
|
||||
"color": metadata.get("color"),
|
||||
"year": metadata.get("year"),
|
||||
@@ -87,14 +101,15 @@ def convert_cable_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
|
||||
return {"type": "FeatureCollection", "features": features}
|
||||
|
||||
|
||||
def convert_landing_point_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
|
||||
"""Convert landing point records to GeoJSON FeatureCollection"""
|
||||
def convert_landing_point_to_geojson(records: List[CollectedData], city_to_cable_ids_map: Dict[int, List[int]] = None, cable_id_to_name_map: Dict[int, str] = None) -> Dict[str, Any]:
|
||||
features = []
|
||||
|
||||
for record in records:
|
||||
try:
|
||||
lat = float(record.latitude) if record.latitude else None
|
||||
lon = float(record.longitude) if record.longitude else None
|
||||
latitude = get_record_field(record, "latitude")
|
||||
longitude = get_record_field(record, "longitude")
|
||||
lat = float(latitude) if latitude else None
|
||||
lon = float(longitude) if longitude else None
|
||||
except (ValueError, TypeError):
|
||||
continue
|
||||
|
||||
@@ -102,18 +117,84 @@ def convert_landing_point_to_geojson(records: List[CollectedData]) -> Dict[str,
|
||||
continue
|
||||
|
||||
metadata = record.extra_data or {}
|
||||
city_id = metadata.get("city_id")
|
||||
|
||||
props = {
|
||||
"id": record.id,
|
||||
"source_id": record.source_id,
|
||||
"name": record.name,
|
||||
"country": get_record_field(record, "country"),
|
||||
"city": get_record_field(record, "city"),
|
||||
"is_tbd": metadata.get("is_tbd", False),
|
||||
}
|
||||
|
||||
cable_names = []
|
||||
if city_to_cable_ids_map and city_id in city_to_cable_ids_map:
|
||||
for cable_id in city_to_cable_ids_map[city_id]:
|
||||
if cable_id_to_name_map and cable_id in cable_id_to_name_map:
|
||||
cable_names.append(cable_id_to_name_map[cable_id])
|
||||
|
||||
if cable_names:
|
||||
props["cable_names"] = cable_names
|
||||
|
||||
features.append(
|
||||
{
|
||||
"type": "Feature",
|
||||
"geometry": {"type": "Point", "coordinates": [lon, lat]},
|
||||
"properties": props,
|
||||
}
|
||||
)
|
||||
|
||||
return {"type": "FeatureCollection", "features": features}
|
||||
|
||||
|
||||
def convert_satellite_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
|
||||
"""Convert satellite TLE records to GeoJSON"""
|
||||
features = []
|
||||
|
||||
for record in records:
|
||||
metadata = record.extra_data or {}
|
||||
norad_id = metadata.get("norad_cat_id")
|
||||
|
||||
if not norad_id:
|
||||
continue
|
||||
|
||||
tle_line1 = metadata.get("tle_line1")
|
||||
tle_line2 = metadata.get("tle_line2")
|
||||
if not tle_line1 or not tle_line2:
|
||||
tle_line1, tle_line2 = build_tle_lines_from_elements(
|
||||
norad_cat_id=norad_id,
|
||||
epoch=metadata.get("epoch"),
|
||||
inclination=metadata.get("inclination"),
|
||||
raan=metadata.get("raan"),
|
||||
eccentricity=metadata.get("eccentricity"),
|
||||
arg_of_perigee=metadata.get("arg_of_perigee"),
|
||||
mean_anomaly=metadata.get("mean_anomaly"),
|
||||
mean_motion=metadata.get("mean_motion"),
|
||||
)
|
||||
|
||||
features.append(
|
||||
{
|
||||
"type": "Feature",
|
||||
"id": norad_id,
|
||||
"geometry": {"type": "Point", "coordinates": [0, 0, 0]},
|
||||
"properties": {
|
||||
"id": record.id,
|
||||
"source_id": record.source_id,
|
||||
"norad_cat_id": norad_id,
|
||||
"name": record.name,
|
||||
"country": record.country,
|
||||
"city": record.city,
|
||||
"is_tbd": metadata.get("is_tbd", False),
|
||||
"international_designator": metadata.get("international_designator"),
|
||||
"epoch": metadata.get("epoch"),
|
||||
"inclination": metadata.get("inclination"),
|
||||
"raan": metadata.get("raan"),
|
||||
"eccentricity": metadata.get("eccentricity"),
|
||||
"arg_of_perigee": metadata.get("arg_of_perigee"),
|
||||
"mean_anomaly": metadata.get("mean_anomaly"),
|
||||
"mean_motion": metadata.get("mean_motion"),
|
||||
"bstar": metadata.get("bstar"),
|
||||
"classification_type": metadata.get("classification_type"),
|
||||
"tle_line1": tle_line1,
|
||||
"tle_line2": tle_line2,
|
||||
"data_type": "satellite_tle",
|
||||
},
|
||||
}
|
||||
)
|
||||
@@ -121,6 +202,158 @@ def convert_landing_point_to_geojson(records: List[CollectedData]) -> Dict[str,
|
||||
return {"type": "FeatureCollection", "features": features}
|
||||
|
||||
|
||||
def convert_supercomputer_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
|
||||
"""Convert TOP500 supercomputer records to GeoJSON"""
|
||||
features = []
|
||||
|
||||
for record in records:
|
||||
try:
|
||||
latitude = get_record_field(record, "latitude")
|
||||
longitude = get_record_field(record, "longitude")
|
||||
lat = float(latitude) if latitude and latitude != "0.0" else None
|
||||
lon = (
|
||||
float(longitude) if longitude and longitude != "0.0" else None
|
||||
)
|
||||
except (ValueError, TypeError):
|
||||
lat, lon = None, None
|
||||
|
||||
metadata = record.extra_data or {}
|
||||
|
||||
features.append(
|
||||
{
|
||||
"type": "Feature",
|
||||
"id": record.id,
|
||||
"geometry": {"type": "Point", "coordinates": [lon or 0, lat or 0]},
|
||||
"properties": {
|
||||
"id": record.id,
|
||||
"name": record.name,
|
||||
"rank": metadata.get("rank"),
|
||||
"r_max": get_record_field(record, "rmax"),
|
||||
"r_peak": get_record_field(record, "rpeak"),
|
||||
"cores": get_record_field(record, "cores"),
|
||||
"power": get_record_field(record, "power"),
|
||||
"country": get_record_field(record, "country"),
|
||||
"city": get_record_field(record, "city"),
|
||||
"data_type": "supercomputer",
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
return {"type": "FeatureCollection", "features": features}
|
||||
|
||||
|
||||
def convert_gpu_cluster_to_geojson(records: List[CollectedData]) -> Dict[str, Any]:
|
||||
"""Convert GPU cluster records to GeoJSON"""
|
||||
features = []
|
||||
|
||||
for record in records:
|
||||
try:
|
||||
latitude = get_record_field(record, "latitude")
|
||||
longitude = get_record_field(record, "longitude")
|
||||
lat = float(latitude) if latitude else None
|
||||
lon = float(longitude) if longitude else None
|
||||
except (ValueError, TypeError):
|
||||
lat, lon = None, None
|
||||
|
||||
metadata = record.extra_data or {}
|
||||
|
||||
features.append(
|
||||
{
|
||||
"type": "Feature",
|
||||
"id": record.id,
|
||||
"geometry": {"type": "Point", "coordinates": [lon or 0, lat or 0]},
|
||||
"properties": {
|
||||
"id": record.id,
|
||||
"name": record.name,
|
||||
"country": get_record_field(record, "country"),
|
||||
"city": get_record_field(record, "city"),
|
||||
"metadata": metadata,
|
||||
"data_type": "gpu_cluster",
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
return {"type": "FeatureCollection", "features": features}
|
||||
|
||||
|
||||
def convert_bgp_anomalies_to_geojson(records: List[BGPAnomaly]) -> Dict[str, Any]:
|
||||
features = []
|
||||
|
||||
for record in records:
|
||||
evidence = record.evidence or {}
|
||||
collectors = evidence.get("collectors") or record.peer_scope or []
|
||||
collector = collectors[0] if collectors else None
|
||||
location = None
|
||||
if collector:
|
||||
location = RIPE_RIS_COLLECTOR_COORDS.get(str(collector))
|
||||
|
||||
if location is None:
|
||||
nested = evidence.get("events") or []
|
||||
for item in nested:
|
||||
collector_name = (item or {}).get("collector")
|
||||
if collector_name and collector_name in RIPE_RIS_COLLECTOR_COORDS:
|
||||
location = RIPE_RIS_COLLECTOR_COORDS[collector_name]
|
||||
collector = collector_name
|
||||
break
|
||||
|
||||
if location is None:
|
||||
continue
|
||||
|
||||
features.append(
|
||||
{
|
||||
"type": "Feature",
|
||||
"geometry": {
|
||||
"type": "Point",
|
||||
"coordinates": [location["longitude"], location["latitude"]],
|
||||
},
|
||||
"properties": {
|
||||
"id": record.id,
|
||||
"collector": collector,
|
||||
"city": location.get("city"),
|
||||
"country": location.get("country"),
|
||||
"source": record.source,
|
||||
"anomaly_type": record.anomaly_type,
|
||||
"severity": record.severity,
|
||||
"status": record.status,
|
||||
"prefix": record.prefix,
|
||||
"origin_asn": record.origin_asn,
|
||||
"new_origin_asn": record.new_origin_asn,
|
||||
"confidence": record.confidence,
|
||||
"summary": record.summary,
|
||||
"created_at": to_iso8601_utc(record.created_at),
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
return {"type": "FeatureCollection", "features": features}
|
||||
|
||||
|
||||
def convert_bgp_collectors_to_geojson() -> Dict[str, Any]:
    """Build a GeoJSON FeatureCollection of all known RIPE RIS collectors.

    Collectors are emitted in sorted name order; every collector is reported
    with a static ``"online"`` status.
    """
    features = [
        {
            "type": "Feature",
            "geometry": {
                "type": "Point",
                "coordinates": [location["longitude"], location["latitude"]],
            },
            "properties": {
                "collector": collector,
                "city": location.get("city"),
                "country": location.get("country"),
                "status": "online",
            },
        }
        for collector, location in sorted(RIPE_RIS_COLLECTOR_COORDS.items())
    ]
    return {"type": "FeatureCollection", "features": features}
|
||||
|
||||
|
||||
# ============== API Endpoints ==============
|
||||
|
||||
|
||||
@router.get("/geo/cables")
|
||||
async def get_cables_geojson(db: AsyncSession = Depends(get_db)):
|
||||
"""获取海底电缆 GeoJSON 数据 (LineString)"""
|
||||
@@ -144,11 +377,37 @@ async def get_cables_geojson(db: AsyncSession = Depends(get_db)):
|
||||
|
||||
@router.get("/geo/landing-points")
|
||||
async def get_landing_points_geojson(db: AsyncSession = Depends(get_db)):
|
||||
"""获取登陆点 GeoJSON 数据 (Point)"""
|
||||
try:
|
||||
stmt = select(CollectedData).where(CollectedData.source == "arcgis_landing_points")
|
||||
result = await db.execute(stmt)
|
||||
records = result.scalars().all()
|
||||
landing_stmt = select(CollectedData).where(CollectedData.source == "arcgis_landing_points")
|
||||
landing_result = await db.execute(landing_stmt)
|
||||
records = landing_result.scalars().all()
|
||||
|
||||
relation_stmt = select(CollectedData).where(CollectedData.source == "arcgis_cable_landing_relation")
|
||||
relation_result = await db.execute(relation_stmt)
|
||||
relation_records = relation_result.scalars().all()
|
||||
|
||||
cable_stmt = select(CollectedData).where(CollectedData.source == "arcgis_cables")
|
||||
cable_result = await db.execute(cable_stmt)
|
||||
cable_records = cable_result.scalars().all()
|
||||
|
||||
city_to_cable_ids_map = {}
|
||||
for rel in relation_records:
|
||||
if rel.extra_data:
|
||||
city_id = rel.extra_data.get("city_id")
|
||||
cable_id = rel.extra_data.get("cable_id")
|
||||
if city_id is not None and cable_id is not None:
|
||||
if city_id not in city_to_cable_ids_map:
|
||||
city_to_cable_ids_map[city_id] = []
|
||||
if cable_id not in city_to_cable_ids_map[city_id]:
|
||||
city_to_cable_ids_map[city_id].append(cable_id)
|
||||
|
||||
cable_id_to_name_map = {}
|
||||
for cable in cable_records:
|
||||
if cable.extra_data:
|
||||
cable_id = cable.extra_data.get("cable_id")
|
||||
cable_name = cable.name
|
||||
if cable_id and cable_name:
|
||||
cable_id_to_name_map[cable_id] = cable_name
|
||||
|
||||
if not records:
|
||||
raise HTTPException(
|
||||
@@ -156,7 +415,7 @@ async def get_landing_points_geojson(db: AsyncSession = Depends(get_db)):
|
||||
detail="No landing point data found. Please run the arcgis_landing_points collector first.",
|
||||
)
|
||||
|
||||
return convert_landing_point_to_geojson(records)
|
||||
return convert_landing_point_to_geojson(records, city_to_cable_ids_map, cable_id_to_name_map)
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
@@ -165,7 +424,6 @@ async def get_landing_points_geojson(db: AsyncSession = Depends(get_db)):
|
||||
|
||||
@router.get("/geo/all")
|
||||
async def get_all_geojson(db: AsyncSession = Depends(get_db)):
|
||||
"""获取所有可视化数据 (电缆 + 登陆点)"""
|
||||
cables_stmt = select(CollectedData).where(CollectedData.source == "arcgis_cables")
|
||||
cables_result = await db.execute(cables_stmt)
|
||||
cables_records = cables_result.scalars().all()
|
||||
@@ -174,13 +432,36 @@ async def get_all_geojson(db: AsyncSession = Depends(get_db)):
|
||||
points_result = await db.execute(points_stmt)
|
||||
points_records = points_result.scalars().all()
|
||||
|
||||
relation_stmt = select(CollectedData).where(CollectedData.source == "arcgis_cable_landing_relation")
|
||||
relation_result = await db.execute(relation_stmt)
|
||||
relation_records = relation_result.scalars().all()
|
||||
|
||||
city_to_cable_ids_map = {}
|
||||
for rel in relation_records:
|
||||
if rel.extra_data:
|
||||
city_id = rel.extra_data.get("city_id")
|
||||
cable_id = rel.extra_data.get("cable_id")
|
||||
if city_id is not None and cable_id is not None:
|
||||
if city_id not in city_to_cable_ids_map:
|
||||
city_to_cable_ids_map[city_id] = []
|
||||
if cable_id not in city_to_cable_ids_map[city_id]:
|
||||
city_to_cable_ids_map[city_id].append(cable_id)
|
||||
|
||||
cable_id_to_name_map = {}
|
||||
for cable in cables_records:
|
||||
if cable.extra_data:
|
||||
cable_id = cable.extra_data.get("cable_id")
|
||||
cable_name = cable.name
|
||||
if cable_id and cable_name:
|
||||
cable_id_to_name_map[cable_id] = cable_name
|
||||
|
||||
cables = (
|
||||
convert_cable_to_geojson(cables_records)
|
||||
if cables_records
|
||||
else {"type": "FeatureCollection", "features": []}
|
||||
)
|
||||
points = (
|
||||
convert_landing_point_to_geojson(points_records)
|
||||
convert_landing_point_to_geojson(points_records, city_to_cable_ids_map, cable_id_to_name_map)
|
||||
if points_records
|
||||
else {"type": "FeatureCollection", "features": []}
|
||||
)
|
||||
@@ -195,6 +476,208 @@ async def get_all_geojson(db: AsyncSession = Depends(get_db)):
|
||||
}
|
||||
|
||||
|
||||
@router.get("/geo/satellites")
async def get_satellites_geojson(
    limit: Optional[int] = Query(
        None,
        ge=1,
        description="Maximum number of satellites to return. Omit for no limit.",
    ),
    db: AsyncSession = Depends(get_db),
):
    """Return satellite TLE records as a GeoJSON FeatureCollection with a count."""
    query = (
        select(CollectedData)
        .where(CollectedData.source == "celestrak_tle")
        .where(CollectedData.name != "Unknown")
        .order_by(CollectedData.id.desc())
    )
    if limit is not None:
        query = query.limit(limit)

    rows = (await db.execute(query)).scalars().all()
    if not rows:
        return {"type": "FeatureCollection", "features": [], "count": 0}

    collection = convert_satellite_to_geojson(list(rows))
    feature_total = len(collection.get("features", []))
    return {**collection, "count": feature_total}
|
||||
|
||||
|
||||
@router.get("/geo/supercomputers")
async def get_supercomputers_geojson(
    limit: int = 500,
    db: AsyncSession = Depends(get_db),
):
    """Return TOP500 supercomputer sites as a GeoJSON FeatureCollection with a count."""
    query = (
        select(CollectedData)
        .where(CollectedData.source == "top500")
        .where(CollectedData.name != "Unknown")
        .limit(limit)
    )
    rows = (await db.execute(query)).scalars().all()

    if not rows:
        return {"type": "FeatureCollection", "features": [], "count": 0}

    collection = convert_supercomputer_to_geojson(list(rows))
    return {**collection, "count": len(collection.get("features", []))}
|
||||
|
||||
|
||||
@router.get("/geo/gpu-clusters")
async def get_gpu_clusters_geojson(
    limit: int = 100,
    db: AsyncSession = Depends(get_db),
):
    """Return GPU cluster records as a GeoJSON FeatureCollection with a count."""
    query = (
        select(CollectedData)
        .where(CollectedData.source == "epoch_ai_gpu")
        .where(CollectedData.name != "Unknown")
        .limit(limit)
    )
    rows = (await db.execute(query)).scalars().all()

    if not rows:
        return {"type": "FeatureCollection", "features": [], "count": 0}

    collection = convert_gpu_cluster_to_geojson(list(rows))
    return {**collection, "count": len(collection.get("features", []))}
|
||||
|
||||
|
||||
@router.get("/geo/bgp-anomalies")
async def get_bgp_anomalies_geojson(
    severity: Optional[str] = Query(None),
    status: Optional[str] = Query("active"),
    limit: int = Query(200, ge=1, le=1000),
    db: AsyncSession = Depends(get_db),
):
    """Return the most recent BGP anomalies as GeoJSON (newest first),
    optionally filtered by severity and status."""
    conditions = []
    if severity:
        conditions.append(BGPAnomaly.severity == severity)
    if status:
        conditions.append(BGPAnomaly.status == status)

    query = select(BGPAnomaly).order_by(BGPAnomaly.created_at.desc()).limit(limit)
    if conditions:
        query = query.where(*conditions)

    rows = list((await db.execute(query)).scalars().all())
    collection = convert_bgp_anomalies_to_geojson(rows)
    return {**collection, "count": len(collection.get("features", []))}
|
||||
|
||||
|
||||
@router.get("/geo/bgp-collectors")
async def get_bgp_collectors_geojson():
    """Return the static RIPE RIS collector locations as GeoJSON with a count."""
    collection = convert_bgp_collectors_to_geojson()
    feature_count = len(collection.get("features", []))
    return {**collection, "count": feature_count}
|
||||
|
||||
|
||||
@router.get("/all")
async def get_all_visualization_data(db: AsyncSession = Depends(get_db)):
    """Unified endpoint returning every visualization dataset.

    Returns GeoJSON FeatureCollections for all data types:
    - satellites: satellite TLE data
    - cables: submarine cables
    - landing_points: cable landing points
    - supercomputers: TOP500 supercomputers
    - gpu_clusters: GPU clusters
    """

    async def _fetch(source: str, exclude_unknown: bool = False) -> list:
        # Load every record for one collector source; optionally drop
        # placeholder rows whose name is "Unknown".
        stmt = select(CollectedData).where(CollectedData.source == source)
        if exclude_unknown:
            stmt = stmt.where(CollectedData.name != "Unknown")
        result = await db.execute(stmt)
        return list(result.scalars().all())

    def _to_collection(records: list, converter) -> dict:
        # Convert records with the given converter, or return an empty
        # FeatureCollection when there is nothing to convert.
        if records:
            return converter(records)
        return {"type": "FeatureCollection", "features": []}

    def _count(collection: dict) -> int:
        # Number of features in a FeatureCollection.
        return len(collection.get("features", []))

    # Queries run sequentially on purpose: a single AsyncSession cannot
    # execute concurrent statements.
    cables_records = await _fetch("arcgis_cables")
    points_records = await _fetch("arcgis_landing_points")
    satellites_records = await _fetch("celestrak_tle", exclude_unknown=True)
    supercomputers_records = await _fetch("top500", exclude_unknown=True)
    gpu_records = await _fetch("epoch_ai_gpu", exclude_unknown=True)

    cables = _to_collection(cables_records, convert_cable_to_geojson)
    landing_points = _to_collection(points_records, convert_landing_point_to_geojson)
    satellites = _to_collection(satellites_records, convert_satellite_to_geojson)
    supercomputers = _to_collection(supercomputers_records, convert_supercomputer_to_geojson)
    gpu_clusters = _to_collection(gpu_records, convert_gpu_cluster_to_geojson)

    return {
        "generated_at": to_iso8601_utc(datetime.now(UTC)),
        "version": "1.0",
        "data": {
            "satellites": satellites,
            "cables": cables,
            "landing_points": landing_points,
            "supercomputers": supercomputers,
            "gpu_clusters": gpu_clusters,
        },
        "stats": {
            "total_features": (
                _count(satellites)
                + _count(cables)
                + _count(landing_points)
                + _count(supercomputers)
                + _count(gpu_clusters)
            ),
            "satellites": _count(satellites),
            "cables": _count(cables),
            "landing_points": _count(landing_points),
            "supercomputers": _count(supercomputers),
            "gpu_clusters": _count(gpu_clusters),
        },
    }
|
||||
|
||||
|
||||
# Cache for cable graph
|
||||
_cable_graph: Optional[CableGraph] = None
|
||||
|
||||
|
||||
@@ -3,13 +3,14 @@
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from datetime import UTC, datetime
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, WebSocket, WebSocketDisconnect, Query
|
||||
from jose import jwt, JWTError
|
||||
|
||||
from app.core.config import settings
|
||||
from app.core.time import to_iso8601_utc
|
||||
from app.core.websocket.manager import manager
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -59,6 +60,7 @@ async def websocket_endpoint(
|
||||
"ixp_nodes",
|
||||
"alerts",
|
||||
"dashboard",
|
||||
"datasource_tasks",
|
||||
],
|
||||
},
|
||||
}
|
||||
@@ -72,7 +74,7 @@ async def websocket_endpoint(
|
||||
await websocket.send_json(
|
||||
{
|
||||
"type": "heartbeat",
|
||||
"data": {"action": "pong", "timestamp": datetime.utcnow().isoformat()},
|
||||
"data": {"action": "pong", "timestamp": to_iso8601_utc(datetime.now(UTC))},
|
||||
}
|
||||
)
|
||||
elif data.get("type") == "subscribe":
|
||||
|
||||
62
backend/app/core/collected_data_fields.py
Normal file
@@ -0,0 +1,62 @@
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
|
||||
# Known metadata fields and the aliases under which each may be stored.
FIELD_ALIASES = {
    "country": ("country",),
    "city": ("city",),
    "latitude": ("latitude",),
    "longitude": ("longitude",),
    "value": ("value",),
    "unit": ("unit",),
    "cores": ("cores",),
    "rmax": ("rmax", "r_max"),
    "rpeak": ("rpeak", "r_peak"),
    "power": ("power",),
}


def get_metadata_field(metadata: Optional[Dict[str, Any]], field: str, fallback: Any = None) -> Any:
    """Look up ``field`` in ``metadata``, trying every known alias in order.

    ``None`` and the empty string count as missing; ``fallback`` is returned
    when no alias yields a usable value or ``metadata`` is not a dict.
    """
    if not isinstance(metadata, dict):
        return fallback

    aliases = FIELD_ALIASES.get(field, (field,))
    for alias in aliases:
        candidate = metadata.get(alias)
        if candidate not in (None, ""):
            return candidate

    return fallback
|
||||
|
||||
|
||||
def build_dynamic_metadata(
    metadata: Optional[Dict[str, Any]],
    *,
    country: Any = None,
    city: Any = None,
    latitude: Any = None,
    longitude: Any = None,
    value: Any = None,
    unit: Any = None,
) -> Dict[str, Any]:
    """Return a copy of ``metadata`` with missing well-known fields filled in.

    A keyword fallback is applied only when it is neither ``None`` nor ``""``
    and the field (under any alias) is absent or empty in ``metadata``. The
    input dict is never mutated.
    """
    merged: Dict[str, Any] = dict(metadata) if isinstance(metadata, dict) else {}

    candidates = (
        ("country", country),
        ("city", city),
        ("latitude", latitude),
        ("longitude", longitude),
        ("value", value),
        ("unit", unit),
    )

    for field, fallback in candidates:
        if fallback in (None, ""):
            continue
        if get_metadata_field(merged, field) in (None, ""):
            merged[field] = fallback

    return merged
|
||||
|
||||
|
||||
def get_record_field(record: Any, field: str) -> Any:
    """Read ``field`` for a record: its metadata (``extra_data``) wins, then
    the record attribute of the same name.

    Metric-only fields (cores/rmax/rpeak/power) never fall back to an
    attribute — they exist only inside metadata.
    """
    metadata = getattr(record, "extra_data", None) or {}
    if field in {"cores", "rmax", "rpeak", "power"}:
        fallback = None
    else:
        fallback = getattr(record, field, None)
    return get_metadata_field(metadata, field, fallback=fallback)
|
||||
@@ -6,9 +6,16 @@ import os
|
||||
from pydantic_settings import BaseSettings
|
||||
|
||||
|
||||
ROOT_DIR = Path(__file__).parent.parent.parent.parent
|
||||
VERSION_FILE = ROOT_DIR / "VERSION"
|
||||
|
||||
|
||||
class Settings(BaseSettings):
|
||||
PROJECT_NAME: str = "Intelligent Planet Plan"
|
||||
VERSION: str = "1.0.0"
|
||||
VERSION: str = (
|
||||
os.getenv("APP_VERSION")
|
||||
or (VERSION_FILE.read_text(encoding="utf-8").strip() if VERSION_FILE.exists() else "0.19.0")
|
||||
)
|
||||
API_V1_STR: str = "/api/v1"
|
||||
SECRET_KEY: str = "your-secret-key-change-in-production"
|
||||
ALGORITHM: str = "HS256"
|
||||
@@ -27,6 +34,9 @@ class Settings(BaseSettings):
|
||||
|
||||
CORS_ORIGINS: List[str] = ["http://localhost:3000", "http://localhost:8000"]
|
||||
|
||||
SPACETRACK_USERNAME: str = ""
|
||||
SPACETRACK_PASSWORD: str = ""
|
||||
|
||||
@property
|
||||
def REDIS_URL(self) -> str:
|
||||
return os.getenv(
|
||||
@@ -34,7 +44,7 @@ class Settings(BaseSettings):
|
||||
)
|
||||
|
||||
class Config:
|
||||
env_file = ".env"
|
||||
env_file = Path(__file__).parent.parent.parent / ".env"
|
||||
case_sensitive = True
|
||||
|
||||
|
||||
|
||||
280
backend/app/core/countries.py
Normal file
@@ -0,0 +1,280 @@
|
||||
import re
|
||||
from typing import Any, Optional
|
||||
|
||||
|
||||
COUNTRY_ENTRIES = [
|
||||
("阿富汗", ["Afghanistan", "AF", "AFG"]),
|
||||
("阿尔巴尼亚", ["Albania", "AL", "ALB"]),
|
||||
("阿尔及利亚", ["Algeria", "DZ", "DZA"]),
|
||||
("安道尔", ["Andorra", "AD", "AND"]),
|
||||
("安哥拉", ["Angola", "AO", "AGO"]),
|
||||
("安提瓜和巴布达", ["Antigua and Barbuda", "AG", "ATG"]),
|
||||
("阿根廷", ["Argentina", "AR", "ARG"]),
|
||||
("亚美尼亚", ["Armenia", "AM", "ARM"]),
|
||||
("澳大利亚", ["Australia", "AU", "AUS"]),
|
||||
("奥地利", ["Austria", "AT", "AUT"]),
|
||||
("阿塞拜疆", ["Azerbaijan", "AZ", "AZE"]),
|
||||
("巴哈马", ["Bahamas", "BS", "BHS"]),
|
||||
("巴林", ["Bahrain", "BH", "BHR"]),
|
||||
("孟加拉国", ["Bangladesh", "BD", "BGD"]),
|
||||
("巴巴多斯", ["Barbados", "BB", "BRB"]),
|
||||
("白俄罗斯", ["Belarus", "BY", "BLR"]),
|
||||
("比利时", ["Belgium", "BE", "BEL"]),
|
||||
("伯利兹", ["Belize", "BZ", "BLZ"]),
|
||||
("贝宁", ["Benin", "BJ", "BEN"]),
|
||||
("不丹", ["Bhutan", "BT", "BTN"]),
|
||||
("玻利维亚", ["Bolivia", "BO", "BOL", "Bolivia (Plurinational State of)"]),
|
||||
("波斯尼亚和黑塞哥维那", ["Bosnia and Herzegovina", "BA", "BIH"]),
|
||||
("博茨瓦纳", ["Botswana", "BW", "BWA"]),
|
||||
("巴西", ["Brazil", "BR", "BRA"]),
|
||||
("文莱", ["Brunei", "BN", "BRN", "Brunei Darussalam"]),
|
||||
("保加利亚", ["Bulgaria", "BG", "BGR"]),
|
||||
("布基纳法索", ["Burkina Faso", "BF", "BFA"]),
|
||||
("布隆迪", ["Burundi", "BI", "BDI"]),
|
||||
("柬埔寨", ["Cambodia", "KH", "KHM"]),
|
||||
("喀麦隆", ["Cameroon", "CM", "CMR"]),
|
||||
("加拿大", ["Canada", "CA", "CAN"]),
|
||||
("佛得角", ["Cape Verde", "CV", "CPV", "Cabo Verde"]),
|
||||
("中非", ["Central African Republic", "CF", "CAF"]),
|
||||
("乍得", ["Chad", "TD", "TCD"]),
|
||||
("智利", ["Chile", "CL", "CHL"]),
|
||||
("中国", ["China", "CN", "CHN", "Mainland China", "PRC", "People's Republic of China"]),
|
||||
("中国(香港)", ["Hong Kong", "HK", "HKG", "Hong Kong SAR", "China Hong Kong", "Hong Kong, China"]),
|
||||
("中国(澳门)", ["Macao", "Macau", "MO", "MAC", "Macao SAR", "China Macao", "Macau, China"]),
|
||||
("中国(台湾)", ["Taiwan", "TW", "TWN", "Chinese Taipei", "Taiwan, China"]),
|
||||
("哥伦比亚", ["Colombia", "CO", "COL"]),
|
||||
("科摩罗", ["Comoros", "KM", "COM"]),
|
||||
("刚果(布)", ["Republic of the Congo", "Congo", "Congo-Brazzaville", "CG", "COG"]),
|
||||
("刚果(金)", ["Democratic Republic of the Congo", "DR Congo", "Congo-Kinshasa", "CD", "COD"]),
|
||||
("哥斯达黎加", ["Costa Rica", "CR", "CRI"]),
|
||||
("科特迪瓦", ["Cote d'Ivoire", "Côte d'Ivoire", "Ivory Coast", "CI", "CIV"]),
|
||||
("克罗地亚", ["Croatia", "HR", "HRV"]),
|
||||
("古巴", ["Cuba", "CU", "CUB"]),
|
||||
("塞浦路斯", ["Cyprus", "CY", "CYP"]),
|
||||
("捷克", ["Czech Republic", "Czechia", "CZ", "CZE"]),
|
||||
("丹麦", ["Denmark", "DK", "DNK"]),
|
||||
("吉布提", ["Djibouti", "DJ", "DJI"]),
|
||||
("多米尼克", ["Dominica", "DM", "DMA"]),
|
||||
("多米尼加", ["Dominican Republic", "DO", "DOM"]),
|
||||
("厄瓜多尔", ["Ecuador", "EC", "ECU"]),
|
||||
("埃及", ["Egypt", "EG", "EGY"]),
|
||||
("萨尔瓦多", ["El Salvador", "SV", "SLV"]),
|
||||
("赤道几内亚", ["Equatorial Guinea", "GQ", "GNQ"]),
|
||||
("厄立特里亚", ["Eritrea", "ER", "ERI"]),
|
||||
("爱沙尼亚", ["Estonia", "EE", "EST"]),
|
||||
("埃斯瓦蒂尼", ["Eswatini", "SZ", "SWZ", "Swaziland"]),
|
||||
("埃塞俄比亚", ["Ethiopia", "ET", "ETH"]),
|
||||
("斐济", ["Fiji", "FJ", "FJI"]),
|
||||
("芬兰", ["Finland", "FI", "FIN"]),
|
||||
("法国", ["France", "FR", "FRA"]),
|
||||
("加蓬", ["Gabon", "GA", "GAB"]),
|
||||
("冈比亚", ["Gambia", "GM", "GMB"]),
|
||||
("格鲁吉亚", ["Georgia", "GE", "GEO"]),
|
||||
("德国", ["Germany", "DE", "DEU"]),
|
||||
("加纳", ["Ghana", "GH", "GHA"]),
|
||||
("希腊", ["Greece", "GR", "GRC"]),
|
||||
("格林纳达", ["Grenada", "GD", "GRD"]),
|
||||
("危地马拉", ["Guatemala", "GT", "GTM"]),
|
||||
("几内亚", ["Guinea", "GN", "GIN"]),
|
||||
("几内亚比绍", ["Guinea-Bissau", "GW", "GNB"]),
|
||||
("圭亚那", ["Guyana", "GY", "GUY"]),
|
||||
("海地", ["Haiti", "HT", "HTI"]),
|
||||
("洪都拉斯", ["Honduras", "HN", "HND"]),
|
||||
("匈牙利", ["Hungary", "HU", "HUN"]),
|
||||
("冰岛", ["Iceland", "IS", "ISL"]),
|
||||
("印度", ["India", "IN", "IND"]),
|
||||
("印度尼西亚", ["Indonesia", "ID", "IDN"]),
|
||||
("伊朗", ["Iran", "IR", "IRN", "Iran (Islamic Republic of)"]),
|
||||
("伊拉克", ["Iraq", "IQ", "IRQ"]),
|
||||
("爱尔兰", ["Ireland", "IE", "IRL"]),
|
||||
("以色列", ["Israel", "IL", "ISR"]),
|
||||
("意大利", ["Italy", "IT", "ITA"]),
|
||||
("牙买加", ["Jamaica", "JM", "JAM"]),
|
||||
("日本", ["Japan", "JP", "JPN"]),
|
||||
("约旦", ["Jordan", "JO", "JOR"]),
|
||||
("哈萨克斯坦", ["Kazakhstan", "KZ", "KAZ"]),
|
||||
("肯尼亚", ["Kenya", "KE", "KEN"]),
|
||||
("基里巴斯", ["Kiribati", "KI", "KIR"]),
|
||||
("朝鲜", ["North Korea", "Korea, DPRK", "Democratic People's Republic of Korea", "KP", "PRK"]),
|
||||
("韩国", ["South Korea", "Republic of Korea", "Korea", "KR", "KOR"]),
|
||||
("科威特", ["Kuwait", "KW", "KWT"]),
|
||||
("吉尔吉斯斯坦", ["Kyrgyzstan", "KG", "KGZ"]),
|
||||
("老挝", ["Laos", "Lao PDR", "Lao People's Democratic Republic", "LA", "LAO"]),
|
||||
("拉脱维亚", ["Latvia", "LV", "LVA"]),
|
||||
("黎巴嫩", ["Lebanon", "LB", "LBN"]),
|
||||
("莱索托", ["Lesotho", "LS", "LSO"]),
|
||||
("利比里亚", ["Liberia", "LR", "LBR"]),
|
||||
("利比亚", ["Libya", "LY", "LBY"]),
|
||||
("列支敦士登", ["Liechtenstein", "LI", "LIE"]),
|
||||
("立陶宛", ["Lithuania", "LT", "LTU"]),
|
||||
("卢森堡", ["Luxembourg", "LU", "LUX"]),
|
||||
("马达加斯加", ["Madagascar", "MG", "MDG"]),
|
||||
("马拉维", ["Malawi", "MW", "MWI"]),
|
||||
("马来西亚", ["Malaysia", "MY", "MYS"]),
|
||||
("马尔代夫", ["Maldives", "MV", "MDV"]),
|
||||
("马里", ["Mali", "ML", "MLI"]),
|
||||
("马耳他", ["Malta", "MT", "MLT"]),
|
||||
("马绍尔群岛", ["Marshall Islands", "MH", "MHL"]),
|
||||
("毛里塔尼亚", ["Mauritania", "MR", "MRT"]),
|
||||
("毛里求斯", ["Mauritius", "MU", "MUS"]),
|
||||
("墨西哥", ["Mexico", "MX", "MEX"]),
|
||||
("密克罗尼西亚", ["Micronesia", "FM", "FSM", "Federated States of Micronesia"]),
|
||||
("摩尔多瓦", ["Moldova", "MD", "MDA", "Republic of Moldova"]),
|
||||
("摩纳哥", ["Monaco", "MC", "MCO"]),
|
||||
("蒙古", ["Mongolia", "MN", "MNG"]),
|
||||
("黑山", ["Montenegro", "ME", "MNE"]),
|
||||
("摩洛哥", ["Morocco", "MA", "MAR"]),
|
||||
("莫桑比克", ["Mozambique", "MZ", "MOZ"]),
|
||||
("缅甸", ["Myanmar", "MM", "MMR", "Burma"]),
|
||||
("纳米比亚", ["Namibia", "NA", "NAM"]),
|
||||
("瑙鲁", ["Nauru", "NR", "NRU"]),
|
||||
("尼泊尔", ["Nepal", "NP", "NPL"]),
|
||||
("荷兰", ["Netherlands", "NL", "NLD"]),
|
||||
("新西兰", ["New Zealand", "NZ", "NZL"]),
|
||||
("尼加拉瓜", ["Nicaragua", "NI", "NIC"]),
|
||||
("尼日尔", ["Niger", "NE", "NER"]),
|
||||
("尼日利亚", ["Nigeria", "NG", "NGA"]),
|
||||
("北马其顿", ["North Macedonia", "MK", "MKD", "Macedonia"]),
|
||||
("挪威", ["Norway", "NO", "NOR"]),
|
||||
("阿曼", ["Oman", "OM", "OMN"]),
|
||||
("巴基斯坦", ["Pakistan", "PK", "PAK"]),
|
||||
("帕劳", ["Palau", "PW", "PLW"]),
|
||||
("巴勒斯坦", ["Palestine", "PS", "PSE", "State of Palestine"]),
|
||||
("巴拿马", ["Panama", "PA", "PAN"]),
|
||||
("巴布亚新几内亚", ["Papua New Guinea", "PG", "PNG"]),
|
||||
("巴拉圭", ["Paraguay", "PY", "PRY"]),
|
||||
("秘鲁", ["Peru", "PE", "PER"]),
|
||||
("菲律宾", ["Philippines", "PH", "PHL"]),
|
||||
("波兰", ["Poland", "PL", "POL"]),
|
||||
("葡萄牙", ["Portugal", "PT", "PRT"]),
|
||||
("卡塔尔", ["Qatar", "QA", "QAT"]),
|
||||
("罗马尼亚", ["Romania", "RO", "ROU"]),
|
||||
("俄罗斯", ["Russia", "Russian Federation", "RU", "RUS"]),
|
||||
("卢旺达", ["Rwanda", "RW", "RWA"]),
|
||||
("圣基茨和尼维斯", ["Saint Kitts and Nevis", "KN", "KNA"]),
|
||||
("圣卢西亚", ["Saint Lucia", "LC", "LCA"]),
|
||||
("圣文森特和格林纳丁斯", ["Saint Vincent and the Grenadines", "VC", "VCT"]),
|
||||
("萨摩亚", ["Samoa", "WS", "WSM"]),
|
||||
("圣马力诺", ["San Marino", "SM", "SMR"]),
|
||||
("圣多美和普林西比", ["Sao Tome and Principe", "ST", "STP", "São Tomé and Príncipe"]),
|
||||
("沙特阿拉伯", ["Saudi Arabia", "SA", "SAU"]),
|
||||
("塞内加尔", ["Senegal", "SN", "SEN"]),
|
||||
("塞尔维亚", ["Serbia", "RS", "SRB", "Kosovo", "XK", "XKS", "Republic of Kosovo"]),
|
||||
("塞舌尔", ["Seychelles", "SC", "SYC"]),
|
||||
("塞拉利昂", ["Sierra Leone", "SL", "SLE"]),
|
||||
("新加坡", ["Singapore", "SG", "SGP"]),
|
||||
("斯洛伐克", ["Slovakia", "SK", "SVK"]),
|
||||
("斯洛文尼亚", ["Slovenia", "SI", "SVN"]),
|
||||
("所罗门群岛", ["Solomon Islands", "SB", "SLB"]),
|
||||
("索马里", ["Somalia", "SO", "SOM"]),
|
||||
("南非", ["South Africa", "ZA", "ZAF"]),
|
||||
("南苏丹", ["South Sudan", "SS", "SSD"]),
|
||||
("西班牙", ["Spain", "ES", "ESP"]),
|
||||
("斯里兰卡", ["Sri Lanka", "LK", "LKA"]),
|
||||
("苏丹", ["Sudan", "SD", "SDN"]),
|
||||
("苏里南", ["Suriname", "SR", "SUR"]),
|
||||
("瑞典", ["Sweden", "SE", "SWE"]),
|
||||
("瑞士", ["Switzerland", "CH", "CHE"]),
|
||||
("叙利亚", ["Syria", "SY", "SYR", "Syrian Arab Republic"]),
|
||||
("塔吉克斯坦", ["Tajikistan", "TJ", "TJK"]),
|
||||
("坦桑尼亚", ["Tanzania", "TZ", "TZA", "United Republic of Tanzania"]),
|
||||
("泰国", ["Thailand", "TH", "THA"]),
|
||||
("东帝汶", ["Timor-Leste", "East Timor", "TL", "TLS"]),
|
||||
("多哥", ["Togo", "TG", "TGO"]),
|
||||
("汤加", ["Tonga", "TO", "TON"]),
|
||||
("特立尼达和多巴哥", ["Trinidad and Tobago", "TT", "TTO"]),
|
||||
("突尼斯", ["Tunisia", "TN", "TUN"]),
|
||||
("土耳其", ["Turkey", "TR", "TUR", "Türkiye"]),
|
||||
("土库曼斯坦", ["Turkmenistan", "TM", "TKM"]),
|
||||
("图瓦卢", ["Tuvalu", "TV", "TUV"]),
|
||||
("乌干达", ["Uganda", "UG", "UGA"]),
|
||||
("乌克兰", ["Ukraine", "UA", "UKR"]),
|
||||
("阿联酋", ["United Arab Emirates", "AE", "ARE", "UAE"]),
|
||||
("英国", ["United Kingdom", "UK", "GB", "GBR", "Great Britain", "Britain", "England"]),
|
||||
("美国", ["United States", "United States of America", "US", "USA", "U.S.", "U.S.A."]),
|
||||
("乌拉圭", ["Uruguay", "UY", "URY"]),
|
||||
("乌兹别克斯坦", ["Uzbekistan", "UZ", "UZB"]),
|
||||
("瓦努阿图", ["Vanuatu", "VU", "VUT"]),
|
||||
("梵蒂冈", ["Vatican City", "Holy See", "VA", "VAT"]),
|
||||
("委内瑞拉", ["Venezuela", "VE", "VEN", "Venezuela (Bolivarian Republic of)"]),
|
||||
("越南", ["Vietnam", "Viet Nam", "VN", "VNM"]),
|
||||
("也门", ["Yemen", "YE", "YEM"]),
|
||||
("赞比亚", ["Zambia", "ZM", "ZMB"]),
|
||||
("津巴布韦", ["Zimbabwe", "ZW", "ZWE"]),
|
||||
]
|
||||
|
||||
|
||||
COUNTRY_OPTIONS = [entry[0] for entry in COUNTRY_ENTRIES]
|
||||
CANONICAL_COUNTRY_SET = set(COUNTRY_OPTIONS)
|
||||
INVALID_COUNTRY_VALUES = {
|
||||
"",
|
||||
"-",
|
||||
"--",
|
||||
"unknown",
|
||||
"n/a",
|
||||
"na",
|
||||
"none",
|
||||
"null",
|
||||
"global",
|
||||
"world",
|
||||
"worldwide",
|
||||
"xx",
|
||||
}
|
||||
NUMERIC_LIKE_PATTERN = re.compile(r"^[\d\s,._%+\-]+$")
|
||||
|
||||
COUNTRY_ALIAS_MAP = {}
|
||||
COUNTRY_VARIANTS_MAP = {}
|
||||
for canonical, aliases in COUNTRY_ENTRIES:
|
||||
COUNTRY_ALIAS_MAP[canonical.casefold()] = canonical
|
||||
variants = [canonical, *aliases]
|
||||
COUNTRY_VARIANTS_MAP[canonical] = variants
|
||||
for alias in aliases:
|
||||
COUNTRY_ALIAS_MAP[alias.casefold()] = canonical
|
||||
|
||||
|
||||
def normalize_country(value: Any) -> Optional[str]:
    """Normalize a raw country value to its canonical (Chinese) name.

    Returns ``None`` for non-strings, empty or placeholder values
    (``INVALID_COUNTRY_VALUES``), and purely numeric-looking strings.
    Matching is whitespace-normalized and case-insensitive against canonical
    names and their aliases.
    """
    if not isinstance(value, str):
        # Covers None and any non-string input.
        return None

    normalized = re.sub(r"\s+", " ", value.strip())
    # Fold full-width CJK parentheses to ASCII so inputs like "中国（香港）"
    # match canonical entries.
    # NOTE(review): the previous line read replace("(", "(") — a no-op that
    # looks like an encoding-mangled full-width/half-width fold; confirm the
    # canonical data uses ASCII parentheses.
    normalized = normalized.replace("（", "(").replace("）", ")")

    if not normalized:
        return None

    lowered = normalized.casefold()
    if lowered in INVALID_COUNTRY_VALUES:
        return None

    # Reject values that are just digits/punctuation (row numbers, metrics).
    if NUMERIC_LIKE_PATTERN.fullmatch(normalized):
        return None

    if normalized in CANONICAL_COUNTRY_SET:
        return normalized

    return COUNTRY_ALIAS_MAP.get(lowered)
|
||||
|
||||
|
||||
def get_country_search_variants(value: Any) -> list[str]:
    """Return the canonical name plus every alias for ``value``.

    The result is whitespace-normalized and deduplicated case-insensitively,
    preserving first-seen order. An empty list is returned when the value
    cannot be normalized to a known country.
    """
    canonical = normalize_country(value)
    if canonical is None:
        return []

    result: list[str] = []
    seen: set[str] = set()
    for raw in COUNTRY_VARIANTS_MAP.get(canonical, [canonical]):
        if not isinstance(raw, str):
            continue
        cleaned = re.sub(r"\s+", " ", raw.strip())
        if not cleaned:
            continue
        folded = cleaned.casefold()
        if folded not in seen:
            seen.add(folded)
            result.append(cleaned)

    return result
||||
@@ -22,6 +22,9 @@ COLLECTOR_URL_KEYS = {
|
||||
"peeringdb_facility": "peeringdb.facility_url",
|
||||
"top500": "top500.url",
|
||||
"epoch_ai_gpu": "epoch_ai.gpu_clusters_url",
|
||||
"spacetrack_tle": "spacetrack.tle_query_url",
|
||||
"ris_live_bgp": "ris_live.url",
|
||||
"bgpstream_bgp": "bgpstream.url",
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -33,3 +33,13 @@ top500:
|
||||
|
||||
epoch_ai:
|
||||
gpu_clusters_url: "https://epoch.ai/data/gpu-clusters"
|
||||
|
||||
spacetrack:
|
||||
base_url: "https://www.space-track.org"
|
||||
tle_query_url: "https://www.space-track.org/basicspacedata/query/class/gp/orderby/EPOCH%20desc/limit/1000/format/json"
|
||||
|
||||
ris_live:
|
||||
url: "https://ris-live.ripe.net/v1/stream/?format=json&client=planet-ris-live"
|
||||
|
||||
bgpstream:
|
||||
url: "https://broker.bgpstream.caida.org/v2"
|
||||
|
||||
140
backend/app/core/datasource_defaults.py
Normal file
@@ -0,0 +1,140 @@
|
||||
"""Default built-in datasource definitions."""
|
||||
|
||||
DEFAULT_DATASOURCES = {
|
||||
"top500": {
|
||||
"id": 1,
|
||||
"name": "TOP500 Supercomputers",
|
||||
"module": "L1",
|
||||
"priority": "P0",
|
||||
"frequency_minutes": 240,
|
||||
},
|
||||
"epoch_ai_gpu": {
|
||||
"id": 2,
|
||||
"name": "Epoch AI GPU Clusters",
|
||||
"module": "L1",
|
||||
"priority": "P0",
|
||||
"frequency_minutes": 360,
|
||||
},
|
||||
"huggingface_models": {
|
||||
"id": 3,
|
||||
"name": "HuggingFace Models",
|
||||
"module": "L2",
|
||||
"priority": "P1",
|
||||
"frequency_minutes": 720,
|
||||
},
|
||||
"huggingface_datasets": {
|
||||
"id": 4,
|
||||
"name": "HuggingFace Datasets",
|
||||
"module": "L2",
|
||||
"priority": "P1",
|
||||
"frequency_minutes": 720,
|
||||
},
|
||||
"huggingface_spaces": {
|
||||
"id": 5,
|
||||
"name": "HuggingFace Spaces",
|
||||
"module": "L2",
|
||||
"priority": "P2",
|
||||
"frequency_minutes": 1440,
|
||||
},
|
||||
"peeringdb_ixp": {
|
||||
"id": 6,
|
||||
"name": "PeeringDB IXP",
|
||||
"module": "L2",
|
||||
"priority": "P1",
|
||||
"frequency_minutes": 1440,
|
||||
},
|
||||
"peeringdb_network": {
|
||||
"id": 7,
|
||||
"name": "PeeringDB Networks",
|
||||
"module": "L2",
|
||||
"priority": "P2",
|
||||
"frequency_minutes": 2880,
|
||||
},
|
||||
"peeringdb_facility": {
|
||||
"id": 8,
|
||||
"name": "PeeringDB Facilities",
|
||||
"module": "L2",
|
||||
"priority": "P2",
|
||||
"frequency_minutes": 2880,
|
||||
},
|
||||
"telegeography_cables": {
|
||||
"id": 9,
|
||||
"name": "Submarine Cables",
|
||||
"module": "L2",
|
||||
"priority": "P1",
|
||||
"frequency_minutes": 10080,
|
||||
},
|
||||
"telegeography_landing": {
|
||||
"id": 10,
|
||||
"name": "Cable Landing Points",
|
||||
"module": "L2",
|
||||
"priority": "P2",
|
||||
"frequency_minutes": 10080,
|
||||
},
|
||||
"telegeography_systems": {
|
||||
"id": 11,
|
||||
"name": "Cable Systems",
|
||||
"module": "L2",
|
||||
"priority": "P2",
|
||||
"frequency_minutes": 10080,
|
||||
},
|
||||
"arcgis_cables": {
|
||||
"id": 15,
|
||||
"name": "ArcGIS Submarine Cables",
|
||||
"module": "L2",
|
||||
"priority": "P1",
|
||||
"frequency_minutes": 10080,
|
||||
},
|
||||
"arcgis_landing_points": {
|
||||
"id": 16,
|
||||
"name": "ArcGIS Landing Points",
|
||||
"module": "L2",
|
||||
"priority": "P1",
|
||||
"frequency_minutes": 10080,
|
||||
},
|
||||
"arcgis_cable_landing_relation": {
|
||||
"id": 17,
|
||||
"name": "ArcGIS Cable-Landing Relations",
|
||||
"module": "L2",
|
||||
"priority": "P1",
|
||||
"frequency_minutes": 10080,
|
||||
},
|
||||
"fao_landing_points": {
|
||||
"id": 18,
|
||||
"name": "FAO Landing Points",
|
||||
"module": "L2",
|
||||
"priority": "P1",
|
||||
"frequency_minutes": 10080,
|
||||
},
|
||||
"spacetrack_tle": {
|
||||
"id": 19,
|
||||
"name": "Space-Track TLE",
|
||||
"module": "L3",
|
||||
"priority": "P2",
|
||||
"frequency_minutes": 1440,
|
||||
},
|
||||
"celestrak_tle": {
|
||||
"id": 20,
|
||||
"name": "CelesTrak TLE",
|
||||
"module": "L3",
|
||||
"priority": "P2",
|
||||
"frequency_minutes": 1440,
|
||||
},
|
||||
"ris_live_bgp": {
|
||||
"id": 21,
|
||||
"name": "RIPE RIS Live BGP",
|
||||
"module": "L3",
|
||||
"priority": "P1",
|
||||
"frequency_minutes": 15,
|
||||
},
|
||||
"bgpstream_bgp": {
|
||||
"id": 22,
|
||||
"name": "CAIDA BGPStream Backfill",
|
||||
"module": "L3",
|
||||
"priority": "P1",
|
||||
"frequency_minutes": 360,
|
||||
},
|
||||
}
|
||||
|
||||
ID_TO_COLLECTOR = {info["id"]: name for name, info in DEFAULT_DATASOURCES.items()}
|
||||
COLLECTOR_TO_ID = {name: info["id"] for name, info in DEFAULT_DATASOURCES.items()}
|
||||
116
backend/app/core/satellite_tle.py
Normal file
@@ -0,0 +1,116 @@
|
||||
"""Helpers for building stable TLE lines from orbital elements."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Any, Optional
|
||||
|
||||
|
||||
def compute_tle_checksum(line: str) -> str:
|
||||
"""Compute the standard modulo-10 checksum for a TLE line."""
|
||||
total = 0
|
||||
|
||||
for char in line[:68]:
|
||||
if char.isdigit():
|
||||
total += int(char)
|
||||
elif char == "-":
|
||||
total += 1
|
||||
|
||||
return str(total % 10)
|
||||
|
||||
|
||||
def _parse_epoch(value: Any) -> Optional[datetime]:
|
||||
if not value:
|
||||
return None
|
||||
if isinstance(value, datetime):
|
||||
return value
|
||||
if isinstance(value, str):
|
||||
return datetime.fromisoformat(value.replace("Z", "+00:00"))
|
||||
return None
|
||||
|
||||
|
||||
def build_tle_line1(norad_cat_id: Any, epoch: Any) -> Optional[str]:
|
||||
"""Build a valid TLE line 1 from the NORAD id and epoch."""
|
||||
epoch_date = _parse_epoch(epoch)
|
||||
if not norad_cat_id or epoch_date is None:
|
||||
return None
|
||||
|
||||
epoch_year = epoch_date.year % 100
|
||||
start_of_year = datetime(epoch_date.year, 1, 1, tzinfo=epoch_date.tzinfo)
|
||||
day_of_year = (epoch_date - start_of_year).days + 1
|
||||
ms_of_day = (
|
||||
epoch_date.hour * 3600000
|
||||
+ epoch_date.minute * 60000
|
||||
+ epoch_date.second * 1000
|
||||
+ int(epoch_date.microsecond / 1000)
|
||||
)
|
||||
day_fraction = ms_of_day / 86400000
|
||||
decimal_fraction = f"{day_fraction:.8f}"[1:]
|
||||
epoch_str = f"{epoch_year:02d}{day_of_year:03d}{decimal_fraction}"
|
||||
|
||||
core = (
|
||||
f"1 {int(norad_cat_id):05d}U 00001A {epoch_str}"
|
||||
" .00000000 00000-0 00000-0 0 999"
|
||||
)
|
||||
return core + compute_tle_checksum(core)
|
||||
|
||||
|
||||
def build_tle_line2(
|
||||
norad_cat_id: Any,
|
||||
inclination: Any,
|
||||
raan: Any,
|
||||
eccentricity: Any,
|
||||
arg_of_perigee: Any,
|
||||
mean_anomaly: Any,
|
||||
mean_motion: Any,
|
||||
) -> Optional[str]:
|
||||
"""Build a valid TLE line 2 from the standard orbital elements."""
|
||||
required = [
|
||||
norad_cat_id,
|
||||
inclination,
|
||||
raan,
|
||||
eccentricity,
|
||||
arg_of_perigee,
|
||||
mean_anomaly,
|
||||
mean_motion,
|
||||
]
|
||||
if any(value is None for value in required):
|
||||
return None
|
||||
|
||||
eccentricity_digits = str(round(float(eccentricity) * 10_000_000)).zfill(7)
|
||||
core = (
|
||||
f"2 {int(norad_cat_id):05d}"
|
||||
f" {float(inclination):8.4f}"
|
||||
f" {float(raan):8.4f}"
|
||||
f" {eccentricity_digits}"
|
||||
f" {float(arg_of_perigee):8.4f}"
|
||||
f" {float(mean_anomaly):8.4f}"
|
||||
f" {float(mean_motion):11.8f}"
|
||||
"00000"
|
||||
)
|
||||
return core + compute_tle_checksum(core)
|
||||
|
||||
|
||||
def build_tle_lines_from_elements(
|
||||
*,
|
||||
norad_cat_id: Any,
|
||||
epoch: Any,
|
||||
inclination: Any,
|
||||
raan: Any,
|
||||
eccentricity: Any,
|
||||
arg_of_perigee: Any,
|
||||
mean_anomaly: Any,
|
||||
mean_motion: Any,
|
||||
) -> tuple[Optional[str], Optional[str]]:
|
||||
"""Build both TLE lines from a metadata payload."""
|
||||
line1 = build_tle_line1(norad_cat_id, epoch)
|
||||
line2 = build_tle_line2(
|
||||
norad_cat_id,
|
||||
inclination,
|
||||
raan,
|
||||
eccentricity,
|
||||
arg_of_perigee,
|
||||
mean_anomaly,
|
||||
mean_motion,
|
||||
)
|
||||
return line1, line2
|
||||
@@ -1,4 +1,4 @@
|
||||
from datetime import datetime, timedelta
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from typing import Optional
|
||||
|
||||
import bcrypt
|
||||
@@ -49,9 +49,9 @@ def get_password_hash(password: str) -> str:
|
||||
def create_access_token(data: dict, expires_delta: Optional[timedelta] = None) -> str:
|
||||
to_encode = data.copy()
|
||||
if expires_delta:
|
||||
expire = datetime.utcnow() + expires_delta
|
||||
expire = datetime.now(UTC) + expires_delta
|
||||
elif settings.ACCESS_TOKEN_EXPIRE_MINUTES > 0:
|
||||
expire = datetime.utcnow() + timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
|
||||
expire = datetime.now(UTC) + timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
|
||||
else:
|
||||
expire = None
|
||||
if expire:
|
||||
@@ -65,7 +65,7 @@ def create_access_token(data: dict, expires_delta: Optional[timedelta] = None) -
|
||||
def create_refresh_token(data: dict) -> str:
|
||||
to_encode = data.copy()
|
||||
if settings.REFRESH_TOKEN_EXPIRE_DAYS > 0:
|
||||
expire = datetime.utcnow() + timedelta(days=settings.REFRESH_TOKEN_EXPIRE_DAYS)
|
||||
expire = datetime.now(UTC) + timedelta(days=settings.REFRESH_TOKEN_EXPIRE_DAYS)
|
||||
to_encode.update({"exp": expire})
|
||||
to_encode.update({"type": "refresh"})
|
||||
if "sub" in to_encode:
|
||||
|
||||
20
backend/app/core/time.py
Normal file
@@ -0,0 +1,20 @@
|
||||
"""Time helpers for API serialization."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import UTC, datetime
|
||||
|
||||
|
||||
def ensure_utc(value: datetime | None) -> datetime | None:
|
||||
if value is None:
|
||||
return None
|
||||
if value.tzinfo is None:
|
||||
return value.replace(tzinfo=UTC)
|
||||
return value.astimezone(UTC)
|
||||
|
||||
|
||||
def to_iso8601_utc(value: datetime | None) -> str | None:
|
||||
normalized = ensure_utc(value)
|
||||
if normalized is None:
|
||||
return None
|
||||
return normalized.isoformat().replace("+00:00", "Z")
|
||||
@@ -1,9 +1,10 @@
|
||||
"""Data broadcaster for WebSocket connections"""
|
||||
|
||||
import asyncio
|
||||
from datetime import datetime
|
||||
from datetime import UTC, datetime
|
||||
from typing import Dict, Any, Optional
|
||||
|
||||
from app.core.time import to_iso8601_utc
|
||||
from app.core.websocket.manager import manager
|
||||
|
||||
|
||||
@@ -22,7 +23,7 @@ class DataBroadcaster:
|
||||
"active_datasources": 8,
|
||||
"tasks_today": 45,
|
||||
"success_rate": 97.8,
|
||||
"last_updated": datetime.utcnow().isoformat(),
|
||||
"last_updated": to_iso8601_utc(datetime.now(UTC)),
|
||||
"alerts": {"critical": 0, "warning": 2, "info": 5},
|
||||
}
|
||||
|
||||
@@ -35,7 +36,7 @@ class DataBroadcaster:
|
||||
{
|
||||
"type": "data_frame",
|
||||
"channel": "dashboard",
|
||||
"timestamp": datetime.utcnow().isoformat(),
|
||||
"timestamp": to_iso8601_utc(datetime.now(UTC)),
|
||||
"payload": {"stats": stats},
|
||||
},
|
||||
channel="dashboard",
|
||||
@@ -49,7 +50,7 @@ class DataBroadcaster:
|
||||
await manager.broadcast(
|
||||
{
|
||||
"type": "alert_notification",
|
||||
"timestamp": datetime.utcnow().isoformat(),
|
||||
"timestamp": to_iso8601_utc(datetime.now(UTC)),
|
||||
"data": {"alert": alert},
|
||||
}
|
||||
)
|
||||
@@ -60,7 +61,7 @@ class DataBroadcaster:
|
||||
{
|
||||
"type": "data_frame",
|
||||
"channel": "gpu_clusters",
|
||||
"timestamp": datetime.utcnow().isoformat(),
|
||||
"timestamp": to_iso8601_utc(datetime.now(UTC)),
|
||||
"payload": data,
|
||||
}
|
||||
)
|
||||
@@ -71,12 +72,24 @@ class DataBroadcaster:
|
||||
{
|
||||
"type": "data_frame",
|
||||
"channel": channel,
|
||||
"timestamp": datetime.utcnow().isoformat(),
|
||||
"timestamp": to_iso8601_utc(datetime.now(UTC)),
|
||||
"payload": data,
|
||||
},
|
||||
channel=channel if channel in manager.active_connections else "all",
|
||||
)
|
||||
|
||||
async def broadcast_datasource_task_update(self, data: Dict[str, Any]):
|
||||
"""Broadcast datasource task progress updates to connected clients."""
|
||||
await manager.broadcast(
|
||||
{
|
||||
"type": "data_frame",
|
||||
"channel": "datasource_tasks",
|
||||
"timestamp": to_iso8601_utc(datetime.now(UTC)),
|
||||
"payload": data,
|
||||
},
|
||||
channel="all",
|
||||
)
|
||||
|
||||
def start(self):
|
||||
"""Start all broadcasters"""
|
||||
if not self.running:
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from typing import AsyncGenerator
|
||||
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine, async_sessionmaker
|
||||
from sqlalchemy.orm import declarative_base
|
||||
|
||||
@@ -25,11 +26,126 @@ async def get_db() -> AsyncGenerator[AsyncSession, None]:
|
||||
raise
|
||||
|
||||
|
||||
async def seed_default_datasources(session: AsyncSession):
|
||||
from app.core.datasource_defaults import DEFAULT_DATASOURCES
|
||||
from app.models.datasource import DataSource
|
||||
|
||||
for source, info in DEFAULT_DATASOURCES.items():
|
||||
existing = await session.get(DataSource, info["id"])
|
||||
if existing:
|
||||
existing.name = info["name"]
|
||||
existing.source = source
|
||||
existing.module = info["module"]
|
||||
existing.priority = info["priority"]
|
||||
existing.frequency_minutes = info["frequency_minutes"]
|
||||
existing.collector_class = source
|
||||
if existing.config is None:
|
||||
existing.config = "{}"
|
||||
continue
|
||||
|
||||
session.add(
|
||||
DataSource(
|
||||
id=info["id"],
|
||||
name=info["name"],
|
||||
source=source,
|
||||
module=info["module"],
|
||||
priority=info["priority"],
|
||||
frequency_minutes=info["frequency_minutes"],
|
||||
collector_class=source,
|
||||
config="{}",
|
||||
is_active=True,
|
||||
)
|
||||
)
|
||||
|
||||
await session.commit()
|
||||
|
||||
|
||||
async def ensure_default_admin_user(session: AsyncSession):
|
||||
from app.core.security import get_password_hash
|
||||
from app.models.user import User
|
||||
|
||||
result = await session.execute(
|
||||
text("SELECT id FROM users WHERE username = 'admin'")
|
||||
)
|
||||
if result.fetchone():
|
||||
return
|
||||
|
||||
session.add(
|
||||
User(
|
||||
username="admin",
|
||||
email="admin@planet.local",
|
||||
password_hash=get_password_hash("admin123"),
|
||||
role="super_admin",
|
||||
is_active=True,
|
||||
)
|
||||
)
|
||||
await session.commit()
|
||||
|
||||
|
||||
async def init_db():
|
||||
import app.models.user # noqa: F401
|
||||
import app.models.gpu_cluster # noqa: F401
|
||||
import app.models.task # noqa: F401
|
||||
import app.models.data_snapshot # noqa: F401
|
||||
import app.models.datasource # noqa: F401
|
||||
import app.models.datasource_config # noqa: F401
|
||||
import app.models.alert # noqa: F401
|
||||
import app.models.bgp_anomaly # noqa: F401
|
||||
import app.models.collected_data # noqa: F401
|
||||
import app.models.system_setting # noqa: F401
|
||||
|
||||
async with engine.begin() as conn:
|
||||
await conn.run_sync(Base.metadata.create_all)
|
||||
await conn.execute(
|
||||
text(
|
||||
"""
|
||||
ALTER TABLE collected_data
|
||||
ADD COLUMN IF NOT EXISTS snapshot_id INTEGER,
|
||||
ADD COLUMN IF NOT EXISTS task_id INTEGER,
|
||||
ADD COLUMN IF NOT EXISTS entity_key VARCHAR(255),
|
||||
ADD COLUMN IF NOT EXISTS is_current BOOLEAN DEFAULT TRUE,
|
||||
ADD COLUMN IF NOT EXISTS previous_record_id INTEGER,
|
||||
ADD COLUMN IF NOT EXISTS change_type VARCHAR(20),
|
||||
ADD COLUMN IF NOT EXISTS change_summary JSONB DEFAULT '{}'::jsonb,
|
||||
ADD COLUMN IF NOT EXISTS deleted_at TIMESTAMPTZ
|
||||
"""
|
||||
)
|
||||
)
|
||||
await conn.execute(
|
||||
text(
|
||||
"""
|
||||
ALTER TABLE collection_tasks
|
||||
ADD COLUMN IF NOT EXISTS phase VARCHAR(30) DEFAULT 'queued'
|
||||
"""
|
||||
)
|
||||
)
|
||||
await conn.execute(
|
||||
text(
|
||||
"""
|
||||
CREATE INDEX IF NOT EXISTS idx_collected_data_source_source_id
|
||||
ON collected_data (source, source_id)
|
||||
"""
|
||||
)
|
||||
)
|
||||
await conn.execute(
|
||||
text(
|
||||
"""
|
||||
UPDATE collected_data
|
||||
SET entity_key = source || ':' || COALESCE(source_id, id::text)
|
||||
WHERE entity_key IS NULL
|
||||
"""
|
||||
)
|
||||
)
|
||||
await conn.execute(
|
||||
text(
|
||||
"""
|
||||
UPDATE collected_data
|
||||
SET is_current = TRUE
|
||||
WHERE is_current IS NULL
|
||||
"""
|
||||
)
|
||||
)
|
||||
|
||||
async with async_session_factory() as session:
|
||||
await seed_default_datasources(session)
|
||||
await ensure_default_admin_user(session)
|
||||
|
||||
@@ -2,15 +2,19 @@ from contextlib import asynccontextmanager
|
||||
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
from starlette.middleware.base import BaseHTTPMiddleware
|
||||
|
||||
from app.core.config import settings
|
||||
from app.core.websocket.broadcaster import broadcaster
|
||||
from app.db.session import init_db, async_session_factory
|
||||
from app.api.main import api_router
|
||||
from app.api.v1 import websocket
|
||||
from app.services.scheduler import start_scheduler, stop_scheduler
|
||||
from app.core.config import settings
|
||||
from app.core.websocket.broadcaster import broadcaster
|
||||
from app.db.session import init_db
|
||||
from app.services.scheduler import (
|
||||
cleanup_stale_running_tasks,
|
||||
start_scheduler,
|
||||
stop_scheduler,
|
||||
sync_scheduler_with_datasources,
|
||||
)
|
||||
|
||||
|
||||
class WebSocketCORSMiddleware(BaseHTTPMiddleware):
|
||||
@@ -27,7 +31,9 @@ class WebSocketCORSMiddleware(BaseHTTPMiddleware):
|
||||
@asynccontextmanager
|
||||
async def lifespan(app: FastAPI):
|
||||
await init_db()
|
||||
await cleanup_stale_running_tasks()
|
||||
start_scheduler()
|
||||
await sync_scheduler_with_datasources()
|
||||
broadcaster.start()
|
||||
yield
|
||||
broadcaster.stop()
|
||||
@@ -60,16 +66,11 @@ app.include_router(websocket.router)
|
||||
|
||||
@app.get("/health")
|
||||
async def health_check():
|
||||
"""健康检查端点"""
|
||||
return {
|
||||
"status": "healthy",
|
||||
"version": settings.VERSION,
|
||||
}
|
||||
return {"status": "healthy", "version": settings.VERSION}
|
||||
|
||||
|
||||
@app.get("/")
|
||||
async def root():
|
||||
"""API根目录"""
|
||||
return {
|
||||
"name": settings.PROJECT_NAME,
|
||||
"version": settings.VERSION,
|
||||
@@ -80,7 +81,6 @@ async def root():
|
||||
|
||||
@app.get("/api/v1/scheduler/jobs")
|
||||
async def get_scheduler_jobs():
|
||||
"""获取调度任务列表"""
|
||||
from app.services.scheduler import get_scheduler_jobs
|
||||
|
||||
return {"jobs": get_scheduler_jobs()}
|
||||
|
||||
@@ -1,15 +1,23 @@
|
||||
from app.models.user import User
|
||||
from app.models.gpu_cluster import GPUCluster
|
||||
from app.models.task import CollectionTask
|
||||
from app.models.data_snapshot import DataSnapshot
|
||||
from app.models.datasource import DataSource
|
||||
from app.models.datasource_config import DataSourceConfig
|
||||
from app.models.alert import Alert, AlertSeverity, AlertStatus
|
||||
from app.models.bgp_anomaly import BGPAnomaly
|
||||
from app.models.system_setting import SystemSetting
|
||||
|
||||
__all__ = [
|
||||
"User",
|
||||
"GPUCluster",
|
||||
"CollectionTask",
|
||||
"DataSnapshot",
|
||||
"DataSource",
|
||||
"DataSourceConfig",
|
||||
"SystemSetting",
|
||||
"Alert",
|
||||
"AlertSeverity",
|
||||
"AlertStatus",
|
||||
"BGPAnomaly",
|
||||
]
|
||||
|
||||
@@ -5,6 +5,7 @@ from typing import Optional
|
||||
from sqlalchemy import Column, Integer, String, DateTime, Text, ForeignKey, Enum as SQLEnum
|
||||
from sqlalchemy.orm import relationship
|
||||
|
||||
from app.core.time import to_iso8601_utc
|
||||
from app.db.session import Base
|
||||
|
||||
|
||||
@@ -50,8 +51,8 @@ class Alert(Base):
|
||||
"acknowledged_by": self.acknowledged_by,
|
||||
"resolved_by": self.resolved_by,
|
||||
"resolution_notes": self.resolution_notes,
|
||||
"created_at": self.created_at.isoformat() if self.created_at else None,
|
||||
"updated_at": self.updated_at.isoformat() if self.updated_at else None,
|
||||
"acknowledged_at": self.acknowledged_at.isoformat() if self.acknowledged_at else None,
|
||||
"resolved_at": self.resolved_at.isoformat() if self.resolved_at else None,
|
||||
"created_at": to_iso8601_utc(self.created_at),
|
||||
"updated_at": to_iso8601_utc(self.updated_at),
|
||||
"acknowledged_at": to_iso8601_utc(self.acknowledged_at),
|
||||
"resolved_at": to_iso8601_utc(self.resolved_at),
|
||||
}
|
||||
|
||||
58
backend/app/models/bgp_anomaly.py
Normal file
@@ -0,0 +1,58 @@
|
||||
"""BGP anomaly model for derived routing intelligence."""
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import Column, DateTime, Float, ForeignKey, Index, Integer, JSON, String, Text
|
||||
|
||||
from app.core.time import to_iso8601_utc
|
||||
from app.db.session import Base
|
||||
|
||||
|
||||
class BGPAnomaly(Base):
|
||||
__tablename__ = "bgp_anomalies"
|
||||
|
||||
id = Column(Integer, primary_key=True, index=True)
|
||||
snapshot_id = Column(Integer, ForeignKey("data_snapshots.id"), nullable=True, index=True)
|
||||
task_id = Column(Integer, ForeignKey("collection_tasks.id"), nullable=True, index=True)
|
||||
source = Column(String(100), nullable=False, index=True)
|
||||
anomaly_type = Column(String(50), nullable=False, index=True)
|
||||
severity = Column(String(20), nullable=False, index=True)
|
||||
status = Column(String(20), nullable=False, default="active", index=True)
|
||||
entity_key = Column(String(255), nullable=False, index=True)
|
||||
prefix = Column(String(64), nullable=True, index=True)
|
||||
origin_asn = Column(Integer, nullable=True, index=True)
|
||||
new_origin_asn = Column(Integer, nullable=True, index=True)
|
||||
peer_scope = Column(JSON, default=list)
|
||||
started_at = Column(DateTime(timezone=True), nullable=False, default=datetime.utcnow, index=True)
|
||||
ended_at = Column(DateTime(timezone=True), nullable=True)
|
||||
confidence = Column(Float, nullable=False, default=0.5)
|
||||
summary = Column(Text, nullable=False)
|
||||
evidence = Column(JSON, default=dict)
|
||||
created_at = Column(DateTime(timezone=True), nullable=False, default=datetime.utcnow, index=True)
|
||||
|
||||
__table_args__ = (
|
||||
Index("idx_bgp_anomalies_source_created", "source", "created_at"),
|
||||
Index("idx_bgp_anomalies_type_status", "anomaly_type", "status"),
|
||||
)
|
||||
|
||||
def to_dict(self) -> dict:
|
||||
return {
|
||||
"id": self.id,
|
||||
"snapshot_id": self.snapshot_id,
|
||||
"task_id": self.task_id,
|
||||
"source": self.source,
|
||||
"anomaly_type": self.anomaly_type,
|
||||
"severity": self.severity,
|
||||
"status": self.status,
|
||||
"entity_key": self.entity_key,
|
||||
"prefix": self.prefix,
|
||||
"origin_asn": self.origin_asn,
|
||||
"new_origin_asn": self.new_origin_asn,
|
||||
"peer_scope": self.peer_scope or [],
|
||||
"started_at": to_iso8601_utc(self.started_at),
|
||||
"ended_at": to_iso8601_utc(self.ended_at),
|
||||
"confidence": self.confidence,
|
||||
"summary": self.summary,
|
||||
"evidence": self.evidence or {},
|
||||
"created_at": to_iso8601_utc(self.created_at),
|
||||
}
|
||||
@@ -1,8 +1,10 @@
|
||||
"""Collected Data model for storing data from all collectors"""
|
||||
|
||||
from sqlalchemy import Column, DateTime, Integer, String, Text, JSON, Index
|
||||
from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String, Text, JSON, Index
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from app.core.collected_data_fields import get_record_field
|
||||
from app.core.time import to_iso8601_utc
|
||||
from app.db.session import Base
|
||||
|
||||
|
||||
@@ -12,8 +14,11 @@ class CollectedData(Base):
|
||||
__tablename__ = "collected_data"
|
||||
|
||||
id = Column(Integer, primary_key=True, autoincrement=True)
|
||||
snapshot_id = Column(Integer, ForeignKey("data_snapshots.id"), nullable=True, index=True)
|
||||
task_id = Column(Integer, ForeignKey("collection_tasks.id"), nullable=True, index=True)
|
||||
source = Column(String(100), nullable=False, index=True) # e.g., "top500", "huggingface_models"
|
||||
source_id = Column(String(100), index=True) # Original ID from source, e.g., "rank_1"
|
||||
entity_key = Column(String(255), index=True)
|
||||
data_type = Column(
|
||||
String(50), nullable=False, index=True
|
||||
) # e.g., "supercomputer", "model", "dataset"
|
||||
@@ -23,16 +28,6 @@ class CollectedData(Base):
|
||||
title = Column(String(500))
|
||||
description = Column(Text)
|
||||
|
||||
# Location data (for geo visualization)
|
||||
country = Column(String(100))
|
||||
city = Column(String(100))
|
||||
latitude = Column(String(50))
|
||||
longitude = Column(String(50))
|
||||
|
||||
# Performance metrics
|
||||
value = Column(String(100)) # Generic value field (Rmax, Rpeak, etc.)
|
||||
unit = Column(String(20))
|
||||
|
||||
# Additional metadata as JSON
|
||||
extra_data = Column(
|
||||
"metadata", JSON, default={}
|
||||
@@ -44,11 +39,17 @@ class CollectedData(Base):
|
||||
|
||||
# Status
|
||||
is_valid = Column(Integer, default=1) # 1=valid, 0=invalid
|
||||
is_current = Column(Boolean, default=True, index=True)
|
||||
previous_record_id = Column(Integer, ForeignKey("collected_data.id"), nullable=True, index=True)
|
||||
change_type = Column(String(20), nullable=True)
|
||||
change_summary = Column(JSON, default={})
|
||||
deleted_at = Column(DateTime(timezone=True), nullable=True)
|
||||
|
||||
# Indexes for common queries
|
||||
__table_args__ = (
|
||||
Index("idx_collected_data_source_collected", "source", "collected_at"),
|
||||
Index("idx_collected_data_source_type", "source", "data_type"),
|
||||
Index("idx_collected_data_source_source_id", "source", "source_id"),
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
@@ -58,23 +59,27 @@ class CollectedData(Base):
|
||||
"""Convert to dictionary"""
|
||||
return {
|
||||
"id": self.id,
|
||||
"snapshot_id": self.snapshot_id,
|
||||
"task_id": self.task_id,
|
||||
"source": self.source,
|
||||
"source_id": self.source_id,
|
||||
"entity_key": self.entity_key,
|
||||
"data_type": self.data_type,
|
||||
"name": self.name,
|
||||
"title": self.title,
|
||||
"description": self.description,
|
||||
"country": self.country,
|
||||
"city": self.city,
|
||||
"latitude": self.latitude,
|
||||
"longitude": self.longitude,
|
||||
"value": self.value,
|
||||
"unit": self.unit,
|
||||
"country": get_record_field(self, "country"),
|
||||
"city": get_record_field(self, "city"),
|
||||
"latitude": get_record_field(self, "latitude"),
|
||||
"longitude": get_record_field(self, "longitude"),
|
||||
"value": get_record_field(self, "value"),
|
||||
"unit": get_record_field(self, "unit"),
|
||||
"metadata": self.extra_data,
|
||||
"collected_at": self.collected_at.isoformat()
|
||||
if self.collected_at is not None
|
||||
else None,
|
||||
"reference_date": self.reference_date.isoformat()
|
||||
if self.reference_date is not None
|
||||
else None,
|
||||
"collected_at": to_iso8601_utc(self.collected_at),
|
||||
"reference_date": to_iso8601_utc(self.reference_date),
|
||||
"is_current": self.is_current,
|
||||
"previous_record_id": self.previous_record_id,
|
||||
"change_type": self.change_type,
|
||||
"change_summary": self.change_summary,
|
||||
"deleted_at": to_iso8601_utc(self.deleted_at),
|
||||
}
|
||||
|
||||
26
backend/app/models/data_snapshot.py
Normal file
@@ -0,0 +1,26 @@
|
||||
from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, JSON, String
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from app.db.session import Base
|
||||
|
||||
|
||||
class DataSnapshot(Base):
|
||||
__tablename__ = "data_snapshots"
|
||||
|
||||
id = Column(Integer, primary_key=True, autoincrement=True)
|
||||
datasource_id = Column(Integer, nullable=False, index=True)
|
||||
task_id = Column(Integer, ForeignKey("collection_tasks.id"), nullable=True, index=True)
|
||||
source = Column(String(100), nullable=False, index=True)
|
||||
snapshot_key = Column(String(100), nullable=True, index=True)
|
||||
reference_date = Column(DateTime(timezone=True), nullable=True)
|
||||
started_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
completed_at = Column(DateTime(timezone=True), nullable=True)
|
||||
record_count = Column(Integer, default=0)
|
||||
status = Column(String(20), nullable=False, default="running")
|
||||
is_current = Column(Boolean, default=True, index=True)
|
||||
parent_snapshot_id = Column(Integer, ForeignKey("data_snapshots.id"), nullable=True, index=True)
|
||||
summary = Column(JSON, default={})
|
||||
created_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
|
||||
def __repr__(self):
|
||||
return f"<DataSnapshot {self.id}: {self.source}/{self.status}>"
|
||||
19
backend/app/models/system_setting.py
Normal file
@@ -0,0 +1,19 @@
|
||||
"""Persistent system settings model."""
|
||||
|
||||
from sqlalchemy import JSON, Column, DateTime, Integer, String, UniqueConstraint
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from app.db.session import Base
|
||||
|
||||
|
||||
class SystemSetting(Base):
|
||||
__tablename__ = "system_settings"
|
||||
__table_args__ = (UniqueConstraint("category", name="uq_system_settings_category"),)
|
||||
|
||||
id = Column(Integer, primary_key=True, autoincrement=True)
|
||||
category = Column(String(50), nullable=False)
|
||||
payload = Column(JSON, nullable=False, default={})
|
||||
updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
|
||||
|
||||
def __repr__(self):
|
||||
return f"<SystemSetting {self.category}>"
|
||||
@@ -12,6 +12,7 @@ class CollectionTask(Base):
|
||||
id = Column(Integer, primary_key=True, autoincrement=True)
|
||||
datasource_id = Column(Integer, nullable=False, index=True)
|
||||
status = Column(String(20), nullable=False) # pending, running, success, failed, cancelled
|
||||
phase = Column(String(30), default="queued")
|
||||
started_at = Column(DateTime(timezone=True))
|
||||
completed_at = Column(DateTime(timezone=True))
|
||||
records_processed = Column(Integer, default=0)
|
||||
|
||||
@@ -28,6 +28,10 @@ from app.services.collectors.arcgis_cables import ArcGISCableCollector
|
||||
from app.services.collectors.fao_landing import FAOLandingPointCollector
|
||||
from app.services.collectors.arcgis_landing import ArcGISLandingPointCollector
|
||||
from app.services.collectors.arcgis_relation import ArcGISCableLandingRelationCollector
|
||||
from app.services.collectors.spacetrack import SpaceTrackTLECollector
|
||||
from app.services.collectors.celestrak import CelesTrakTLECollector
|
||||
from app.services.collectors.ris_live import RISLiveCollector
|
||||
from app.services.collectors.bgpstream import BGPStreamBackfillCollector
|
||||
|
||||
collector_registry.register(TOP500Collector())
|
||||
collector_registry.register(EpochAIGPUCollector())
|
||||
@@ -47,3 +51,7 @@ collector_registry.register(ArcGISCableCollector())
|
||||
collector_registry.register(FAOLandingPointCollector())
|
||||
collector_registry.register(ArcGISLandingPointCollector())
|
||||
collector_registry.register(ArcGISCableLandingRelationCollector())
|
||||
collector_registry.register(SpaceTrackTLECollector())
|
||||
collector_registry.register(CelesTrakTLECollector())
|
||||
collector_registry.register(RISLiveCollector())
|
||||
collector_registry.register(BGPStreamBackfillCollector())
|
||||
|
||||
@@ -5,7 +5,7 @@ Collects submarine cable data from ArcGIS GeoJSON API.
|
||||
|
||||
import json
|
||||
from typing import Dict, Any, List
|
||||
from datetime import datetime
|
||||
from datetime import UTC, datetime
|
||||
import httpx
|
||||
|
||||
from app.services.collectors.base import BaseCollector
|
||||
@@ -84,7 +84,7 @@ class ArcGISCableCollector(BaseCollector):
|
||||
"color": props.get("color"),
|
||||
"route_coordinates": route_coordinates,
|
||||
},
|
||||
"reference_date": datetime.utcnow().strftime("%Y-%m-%d"),
|
||||
"reference_date": datetime.now(UTC).strftime("%Y-%m-%d"),
|
||||
}
|
||||
result.append(entry)
|
||||
except (ValueError, TypeError, KeyError):
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
from typing import Dict, Any, List
|
||||
from datetime import datetime
|
||||
from datetime import UTC, datetime
|
||||
import httpx
|
||||
|
||||
from app.services.collectors.base import BaseCollector
|
||||
@@ -39,6 +39,11 @@ class ArcGISLandingPointCollector(BaseCollector):
|
||||
props = feature.get("properties", {})
|
||||
geometry = feature.get("geometry", {})
|
||||
|
||||
if geometry.get("type") == "Point":
|
||||
coords = geometry.get("coordinates", [])
|
||||
lon = coords[0] if len(coords) > 0 else None
|
||||
lat = coords[1] if len(coords) > 1 else None
|
||||
else:
|
||||
lat = geometry.get("y") if geometry else None
|
||||
lon = geometry.get("x") if geometry else None
|
||||
|
||||
@@ -54,6 +59,7 @@ class ArcGISLandingPointCollector(BaseCollector):
|
||||
"unit": "",
|
||||
"metadata": {
|
||||
"objectid": props.get("OBJECTID"),
|
||||
"city_id": props.get("city_id"),
|
||||
"cable_id": props.get("cable_id"),
|
||||
"cable_name": props.get("cable_name"),
|
||||
"facility": props.get("facility"),
|
||||
@@ -61,7 +67,7 @@ class ArcGISLandingPointCollector(BaseCollector):
|
||||
"status": props.get("status"),
|
||||
"landing_point_id": props.get("landing_point_id"),
|
||||
},
|
||||
"reference_date": datetime.utcnow().strftime("%Y-%m-%d"),
|
||||
"reference_date": datetime.now(UTC).strftime("%Y-%m-%d"),
|
||||
}
|
||||
result.append(entry)
|
||||
except (ValueError, TypeError, KeyError):
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
from typing import Dict, Any, List
|
||||
from datetime import datetime
|
||||
import asyncio
|
||||
from datetime import UTC, datetime
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
import httpx
|
||||
|
||||
from app.services.collectors.base import BaseCollector
|
||||
from app.core.data_sources import get_data_sources_config
|
||||
|
||||
from app.services.collectors.base import BaseCollector
|
||||
|
||||
|
||||
class ArcGISCableLandingRelationCollector(BaseCollector):
|
||||
@@ -18,46 +19,131 @@ class ArcGISCableLandingRelationCollector(BaseCollector):
|
||||
def base_url(self) -> str:
|
||||
if self._resolved_url:
|
||||
return self._resolved_url
|
||||
from app.core.data_sources import get_data_sources_config
|
||||
|
||||
config = get_data_sources_config()
|
||||
return config.get_yaml_url("arcgis_cable_landing_relation")
|
||||
|
||||
async def fetch(self) -> List[Dict[str, Any]]:
|
||||
params = {"where": "1=1", "outFields": "*", "returnGeometry": "true", "f": "geojson"}
|
||||
def _layer_url(self, layer_id: int) -> str:
|
||||
if "/FeatureServer/" not in self.base_url:
|
||||
return self.base_url
|
||||
prefix = self.base_url.split("/FeatureServer/")[0]
|
||||
return f"{prefix}/FeatureServer/{layer_id}/query"
|
||||
|
||||
async with httpx.AsyncClient(timeout=60.0) as client:
|
||||
response = await client.get(self.base_url, params=params)
|
||||
async def _fetch_layer_attributes(
|
||||
self, client: httpx.AsyncClient, layer_id: int
|
||||
) -> List[Dict[str, Any]]:
|
||||
response = await client.get(
|
||||
self._layer_url(layer_id),
|
||||
params={
|
||||
"where": "1=1",
|
||||
"outFields": "*",
|
||||
"returnGeometry": "false",
|
||||
"f": "json",
|
||||
},
|
||||
)
|
||||
response.raise_for_status()
|
||||
return self.parse_response(response.json())
|
||||
data = response.json()
|
||||
return [feature.get("attributes", {}) for feature in data.get("features", [])]
|
||||
|
||||
def parse_response(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
|
||||
result = []
|
||||
async def _fetch_relation_features(self, client: httpx.AsyncClient) -> List[Dict[str, Any]]:
|
||||
response = await client.get(
|
||||
self.base_url,
|
||||
params={
|
||||
"where": "1=1",
|
||||
"outFields": "*",
|
||||
"returnGeometry": "true",
|
||||
"f": "geojson",
|
||||
},
|
||||
)
|
||||
response.raise_for_status()
|
||||
data = response.json()
|
||||
return data.get("features", [])
|
||||
|
||||
features = data.get("features", [])
|
||||
for feature in features:
|
||||
async def fetch(self) -> List[Dict[str, Any]]:
|
||||
async with httpx.AsyncClient(timeout=60.0) as client:
|
||||
relation_features, landing_rows, cable_rows = await asyncio.gather(
|
||||
self._fetch_relation_features(client),
|
||||
self._fetch_layer_attributes(client, 1),
|
||||
self._fetch_layer_attributes(client, 2),
|
||||
)
|
||||
return self.parse_response(relation_features, landing_rows, cable_rows)
|
||||
|
||||
def _build_landing_lookup(self, landing_rows: List[Dict[str, Any]]) -> Dict[int, Dict[str, Any]]:
|
||||
lookup: Dict[int, Dict[str, Any]] = {}
|
||||
for row in landing_rows:
|
||||
city_id = row.get("city_id")
|
||||
if city_id is None:
|
||||
continue
|
||||
lookup[int(city_id)] = {
|
||||
"landing_point_id": row.get("landing_point_id") or city_id,
|
||||
"landing_point_name": row.get("Name") or row.get("name") or "",
|
||||
"facility": row.get("facility") or "",
|
||||
"status": row.get("status") or "",
|
||||
"country": row.get("country") or "",
|
||||
}
|
||||
return lookup
|
||||
|
||||
def _build_cable_lookup(self, cable_rows: List[Dict[str, Any]]) -> Dict[int, Dict[str, Any]]:
|
||||
lookup: Dict[int, Dict[str, Any]] = {}
|
||||
for row in cable_rows:
|
||||
cable_id = row.get("cable_id")
|
||||
if cable_id is None:
|
||||
continue
|
||||
lookup[int(cable_id)] = {
|
||||
"cable_name": row.get("Name") or "",
|
||||
"status": row.get("status") or "active",
|
||||
}
|
||||
return lookup
|
||||
|
||||
def parse_response(
|
||||
self,
|
||||
relation_features: List[Dict[str, Any]],
|
||||
landing_rows: List[Dict[str, Any]],
|
||||
cable_rows: List[Dict[str, Any]],
|
||||
) -> List[Dict[str, Any]]:
|
||||
result: List[Dict[str, Any]] = []
|
||||
landing_lookup = self._build_landing_lookup(landing_rows)
|
||||
cable_lookup = self._build_cable_lookup(cable_rows)
|
||||
|
||||
for feature in relation_features:
|
||||
props = feature.get("properties", {})
|
||||
|
||||
try:
|
||||
city_id = props.get("city_id")
|
||||
cable_id = props.get("cable_id")
|
||||
landing_info = landing_lookup.get(int(city_id), {}) if city_id is not None else {}
|
||||
cable_info = cable_lookup.get(int(cable_id), {}) if cable_id is not None else {}
|
||||
|
||||
cable_name = cable_info.get("cable_name") or props.get("cable_name") or "Unknown"
|
||||
landing_point_name = (
|
||||
landing_info.get("landing_point_name")
|
||||
or props.get("landing_point_name")
|
||||
or "Unknown"
|
||||
)
|
||||
facility = landing_info.get("facility") or props.get("facility") or "-"
|
||||
status = cable_info.get("status") or landing_info.get("status") or props.get("status") or "-"
|
||||
country = landing_info.get("country") or props.get("country") or ""
|
||||
landing_point_id = landing_info.get("landing_point_id") or props.get("landing_point_id") or city_id
|
||||
|
||||
entry = {
|
||||
"source_id": f"arcgis_relation_{props.get('OBJECTID', props.get('id', ''))}",
|
||||
"name": f"{props.get('cable_name', 'Unknown')} - {props.get('landing_point_name', 'Unknown')}",
|
||||
"country": props.get("country", ""),
|
||||
"city": props.get("landing_point_name", ""),
|
||||
"name": f"{cable_name} - {landing_point_name}",
|
||||
"country": country,
|
||||
"city": landing_point_name,
|
||||
"latitude": str(props.get("latitude", "")) if props.get("latitude") else "",
|
||||
"longitude": str(props.get("longitude", "")) if props.get("longitude") else "",
|
||||
"value": "",
|
||||
"unit": "",
|
||||
"metadata": {
|
||||
"objectid": props.get("OBJECTID"),
|
||||
"cable_id": props.get("cable_id"),
|
||||
"cable_name": props.get("cable_name"),
|
||||
"landing_point_id": props.get("landing_point_id"),
|
||||
"landing_point_name": props.get("landing_point_name"),
|
||||
"facility": props.get("facility"),
|
||||
"status": props.get("status"),
|
||||
"city_id": city_id,
|
||||
"cable_id": cable_id,
|
||||
"cable_name": cable_name,
|
||||
"landing_point_id": landing_point_id,
|
||||
"landing_point_name": landing_point_name,
|
||||
"facility": facility,
|
||||
"status": status,
|
||||
},
|
||||
"reference_date": datetime.utcnow().strftime("%Y-%m-%d"),
|
||||
"reference_date": datetime.now(UTC).strftime("%Y-%m-%d"),
|
||||
}
|
||||
result.append(entry)
|
||||
except (ValueError, TypeError, KeyError):
|
||||
|
||||
@@ -2,12 +2,16 @@
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Dict, List, Any, Optional
|
||||
from datetime import datetime
|
||||
from datetime import UTC, datetime
|
||||
import httpx
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy import select, text
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.core.collected_data_fields import build_dynamic_metadata, get_record_field
|
||||
from app.core.config import settings
|
||||
from app.core.countries import normalize_country
|
||||
from app.core.time import to_iso8601_utc
|
||||
from app.core.websocket.broadcaster import broadcaster
|
||||
|
||||
|
||||
class BaseCollector(ABC):
|
||||
@@ -18,12 +22,14 @@ class BaseCollector(ABC):
|
||||
module: str = "L1"
|
||||
frequency_hours: int = 4
|
||||
data_type: str = "generic"
|
||||
fail_on_empty: bool = False
|
||||
|
||||
def __init__(self):
|
||||
self._current_task = None
|
||||
self._db_session = None
|
||||
self._datasource_id = 1
|
||||
self._resolved_url: Optional[str] = None
|
||||
self._last_broadcast_progress: Optional[int] = None
|
||||
|
||||
async def resolve_url(self, db: AsyncSession) -> None:
|
||||
from app.core.data_sources import get_data_sources_config
|
||||
@@ -31,13 +37,53 @@ class BaseCollector(ABC):
|
||||
config = get_data_sources_config()
|
||||
self._resolved_url = await config.get_url(self.name, db)
|
||||
|
||||
def update_progress(self, records_processed: int):
|
||||
async def _publish_task_update(self, force: bool = False):
|
||||
if not self._current_task:
|
||||
return
|
||||
|
||||
progress = float(self._current_task.progress or 0.0)
|
||||
rounded_progress = int(round(progress))
|
||||
if not force and self._last_broadcast_progress == rounded_progress:
|
||||
return
|
||||
|
||||
await broadcaster.broadcast_datasource_task_update(
|
||||
{
|
||||
"datasource_id": getattr(self, "_datasource_id", None),
|
||||
"collector_name": self.name,
|
||||
"task_id": self._current_task.id,
|
||||
"status": self._current_task.status,
|
||||
"phase": self._current_task.phase,
|
||||
"progress": progress,
|
||||
"records_processed": self._current_task.records_processed,
|
||||
"total_records": self._current_task.total_records,
|
||||
"started_at": to_iso8601_utc(self._current_task.started_at),
|
||||
"completed_at": to_iso8601_utc(self._current_task.completed_at),
|
||||
"error_message": self._current_task.error_message,
|
||||
}
|
||||
)
|
||||
self._last_broadcast_progress = rounded_progress
|
||||
|
||||
async def update_progress(self, records_processed: int, *, commit: bool = False, force: bool = False):
|
||||
"""Update task progress - call this during data processing"""
|
||||
if self._current_task and self._db_session and self._current_task.total_records > 0:
|
||||
if self._current_task and self._db_session:
|
||||
self._current_task.records_processed = records_processed
|
||||
if self._current_task.total_records and self._current_task.total_records > 0:
|
||||
self._current_task.progress = (
|
||||
records_processed / self._current_task.total_records
|
||||
) * 100
|
||||
else:
|
||||
self._current_task.progress = 0.0
|
||||
|
||||
if commit:
|
||||
await self._db_session.commit()
|
||||
|
||||
await self._publish_task_update(force=force)
|
||||
|
||||
async def set_phase(self, phase: str):
|
||||
if self._current_task and self._db_session:
|
||||
self._current_task.phase = phase
|
||||
await self._db_session.commit()
|
||||
await self._publish_task_update(force=True)
|
||||
|
||||
@abstractmethod
|
||||
async def fetch(self) -> List[Dict[str, Any]]:
|
||||
@@ -48,14 +94,87 @@ class BaseCollector(ABC):
|
||||
"""Transform raw data to internal format (default: pass through)"""
|
||||
return raw_data
|
||||
|
||||
def _parse_reference_date(self, value: Any) -> Optional[datetime]:
|
||||
if not value:
|
||||
return None
|
||||
if isinstance(value, datetime):
|
||||
return value
|
||||
if isinstance(value, str):
|
||||
return datetime.fromisoformat(value.replace("Z", "+00:00"))
|
||||
return None
|
||||
|
||||
def _build_comparable_payload(self, record: Any) -> Dict[str, Any]:
|
||||
return {
|
||||
"name": getattr(record, "name", None),
|
||||
"title": getattr(record, "title", None),
|
||||
"description": getattr(record, "description", None),
|
||||
"country": get_record_field(record, "country"),
|
||||
"city": get_record_field(record, "city"),
|
||||
"latitude": get_record_field(record, "latitude"),
|
||||
"longitude": get_record_field(record, "longitude"),
|
||||
"value": get_record_field(record, "value"),
|
||||
"unit": get_record_field(record, "unit"),
|
||||
"metadata": getattr(record, "extra_data", None) or {},
|
||||
"reference_date": (
|
||||
getattr(record, "reference_date", None).isoformat()
|
||||
if getattr(record, "reference_date", None)
|
||||
else None
|
||||
),
|
||||
}
|
||||
|
||||
async def _create_snapshot(
|
||||
self,
|
||||
db: AsyncSession,
|
||||
task_id: int,
|
||||
data: List[Dict[str, Any]],
|
||||
started_at: datetime,
|
||||
) -> int:
|
||||
from app.models.data_snapshot import DataSnapshot
|
||||
|
||||
reference_dates = [
|
||||
parsed
|
||||
for parsed in (self._parse_reference_date(item.get("reference_date")) for item in data)
|
||||
if parsed is not None
|
||||
]
|
||||
reference_date = max(reference_dates) if reference_dates else None
|
||||
|
||||
result = await db.execute(
|
||||
select(DataSnapshot)
|
||||
.where(DataSnapshot.source == self.name, DataSnapshot.is_current == True)
|
||||
.order_by(DataSnapshot.completed_at.desc().nullslast(), DataSnapshot.id.desc())
|
||||
.limit(1)
|
||||
)
|
||||
previous_snapshot = result.scalar_one_or_none()
|
||||
|
||||
snapshot = DataSnapshot(
|
||||
datasource_id=getattr(self, "_datasource_id", 1),
|
||||
task_id=task_id,
|
||||
source=self.name,
|
||||
snapshot_key=f"{self.name}:{task_id}",
|
||||
reference_date=reference_date,
|
||||
started_at=started_at,
|
||||
status="running",
|
||||
is_current=True,
|
||||
parent_snapshot_id=previous_snapshot.id if previous_snapshot else None,
|
||||
summary={},
|
||||
)
|
||||
db.add(snapshot)
|
||||
|
||||
if previous_snapshot:
|
||||
previous_snapshot.is_current = False
|
||||
|
||||
await db.commit()
|
||||
return snapshot.id
|
||||
|
||||
async def run(self, db: AsyncSession) -> Dict[str, Any]:
|
||||
"""Full pipeline: fetch -> transform -> save"""
|
||||
from app.services.collectors.registry import collector_registry
|
||||
from app.models.task import CollectionTask
|
||||
from app.models.collected_data import CollectedData
|
||||
from app.models.data_snapshot import DataSnapshot
|
||||
|
||||
start_time = datetime.utcnow()
|
||||
start_time = datetime.now(UTC)
|
||||
datasource_id = getattr(self, "_datasource_id", 1)
|
||||
snapshot_id: Optional[int] = None
|
||||
|
||||
if not collector_registry.is_active(self.name):
|
||||
return {"status": "skipped", "reason": "Collector is disabled"}
|
||||
@@ -63,6 +182,7 @@ class BaseCollector(ABC):
|
||||
task = CollectionTask(
|
||||
datasource_id=datasource_id,
|
||||
status="running",
|
||||
phase="queued",
|
||||
started_at=start_time,
|
||||
)
|
||||
db.add(task)
|
||||
@@ -71,89 +191,221 @@ class BaseCollector(ABC):
|
||||
|
||||
self._current_task = task
|
||||
self._db_session = db
|
||||
self._last_broadcast_progress = None
|
||||
|
||||
await self.resolve_url(db)
|
||||
await self._publish_task_update(force=True)
|
||||
|
||||
try:
|
||||
await self.set_phase("fetching")
|
||||
raw_data = await self.fetch()
|
||||
task.total_records = len(raw_data)
|
||||
await db.commit()
|
||||
await self._publish_task_update(force=True)
|
||||
|
||||
if self.fail_on_empty and not raw_data:
|
||||
raise RuntimeError(f"Collector {self.name} returned no data")
|
||||
|
||||
await self.set_phase("transforming")
|
||||
data = self.transform(raw_data)
|
||||
snapshot_id = await self._create_snapshot(db, task_id, data, start_time)
|
||||
|
||||
records_count = await self._save_data(db, data)
|
||||
await self.set_phase("saving")
|
||||
records_count = await self._save_data(db, data, task_id=task_id, snapshot_id=snapshot_id)
|
||||
|
||||
task.status = "success"
|
||||
task.phase = "completed"
|
||||
task.records_processed = records_count
|
||||
task.progress = 100.0
|
||||
task.completed_at = datetime.utcnow()
|
||||
task.completed_at = datetime.now(UTC)
|
||||
await db.commit()
|
||||
await self._publish_task_update(force=True)
|
||||
|
||||
return {
|
||||
"status": "success",
|
||||
"task_id": task_id,
|
||||
"records_processed": records_count,
|
||||
"execution_time_seconds": (datetime.utcnow() - start_time).total_seconds(),
|
||||
"execution_time_seconds": (datetime.now(UTC) - start_time).total_seconds(),
|
||||
}
|
||||
except Exception as e:
|
||||
task.status = "failed"
|
||||
task.phase = "failed"
|
||||
task.error_message = str(e)
|
||||
task.completed_at = datetime.utcnow()
|
||||
task.completed_at = datetime.now(UTC)
|
||||
if snapshot_id is not None:
|
||||
snapshot = await db.get(DataSnapshot, snapshot_id)
|
||||
if snapshot:
|
||||
snapshot.status = "failed"
|
||||
snapshot.completed_at = datetime.now(UTC)
|
||||
snapshot.summary = {"error": str(e)}
|
||||
await db.commit()
|
||||
await self._publish_task_update(force=True)
|
||||
|
||||
return {
|
||||
"status": "failed",
|
||||
"task_id": task_id,
|
||||
"error": str(e),
|
||||
"execution_time_seconds": (datetime.utcnow() - start_time).total_seconds(),
|
||||
"execution_time_seconds": (datetime.now(UTC) - start_time).total_seconds(),
|
||||
}
|
||||
|
||||
async def _save_data(self, db: AsyncSession, data: List[Dict[str, Any]]) -> int:
|
||||
async def _save_data(
|
||||
self,
|
||||
db: AsyncSession,
|
||||
data: List[Dict[str, Any]],
|
||||
task_id: Optional[int] = None,
|
||||
snapshot_id: Optional[int] = None,
|
||||
) -> int:
|
||||
"""Save transformed data to database"""
|
||||
from app.models.collected_data import CollectedData
|
||||
from app.models.data_snapshot import DataSnapshot
|
||||
|
||||
if not data:
|
||||
if snapshot_id is not None:
|
||||
snapshot = await db.get(DataSnapshot, snapshot_id)
|
||||
if snapshot:
|
||||
snapshot.record_count = 0
|
||||
snapshot.summary = {"created": 0, "updated": 0, "unchanged": 0}
|
||||
snapshot.status = "success"
|
||||
snapshot.completed_at = datetime.now(UTC)
|
||||
await db.commit()
|
||||
return 0
|
||||
|
||||
collected_at = datetime.utcnow()
|
||||
collected_at = datetime.now(UTC)
|
||||
records_added = 0
|
||||
created_count = 0
|
||||
updated_count = 0
|
||||
unchanged_count = 0
|
||||
seen_entity_keys: set[str] = set()
|
||||
previous_current_keys: set[str] = set()
|
||||
|
||||
previous_current_result = await db.execute(
|
||||
select(CollectedData.entity_key).where(
|
||||
CollectedData.source == self.name,
|
||||
CollectedData.is_current == True,
|
||||
)
|
||||
)
|
||||
previous_current_keys = {row[0] for row in previous_current_result.fetchall() if row[0]}
|
||||
|
||||
for i, item in enumerate(data):
|
||||
print(
|
||||
f"DEBUG: Saving item {i}: name={item.get('name')}, metadata={item.get('metadata', 'NOT FOUND')}"
|
||||
)
|
||||
raw_metadata = item.get("metadata", {})
|
||||
extra_data = build_dynamic_metadata(
|
||||
raw_metadata,
|
||||
country=item.get("country"),
|
||||
city=item.get("city"),
|
||||
latitude=item.get("latitude"),
|
||||
longitude=item.get("longitude"),
|
||||
value=item.get("value"),
|
||||
unit=item.get("unit"),
|
||||
)
|
||||
normalized_country = normalize_country(item.get("country"))
|
||||
if normalized_country is not None:
|
||||
extra_data["country"] = normalized_country
|
||||
|
||||
if item.get("country") and normalized_country != item.get("country"):
|
||||
extra_data["raw_country"] = item.get("country")
|
||||
if normalized_country is None:
|
||||
extra_data["country_validation"] = "invalid"
|
||||
|
||||
source_id = item.get("source_id") or item.get("id")
|
||||
reference_date = (
|
||||
self._parse_reference_date(item.get("reference_date"))
|
||||
)
|
||||
source_id_str = str(source_id) if source_id is not None else None
|
||||
entity_key = f"{self.name}:{source_id_str}" if source_id_str else f"{self.name}:{i}"
|
||||
previous_record = None
|
||||
|
||||
if entity_key and entity_key not in seen_entity_keys:
|
||||
result = await db.execute(
|
||||
select(CollectedData)
|
||||
.where(
|
||||
CollectedData.source == self.name,
|
||||
CollectedData.entity_key == entity_key,
|
||||
CollectedData.is_current == True,
|
||||
)
|
||||
.order_by(CollectedData.collected_at.desc().nullslast(), CollectedData.id.desc())
|
||||
)
|
||||
previous_records = result.scalars().all()
|
||||
if previous_records:
|
||||
previous_record = previous_records[0]
|
||||
for old_record in previous_records:
|
||||
old_record.is_current = False
|
||||
|
||||
record = CollectedData(
|
||||
snapshot_id=snapshot_id,
|
||||
task_id=task_id,
|
||||
source=self.name,
|
||||
source_id=item.get("source_id") or item.get("id"),
|
||||
source_id=source_id_str,
|
||||
entity_key=entity_key,
|
||||
data_type=self.data_type,
|
||||
name=item.get("name"),
|
||||
title=item.get("title"),
|
||||
description=item.get("description"),
|
||||
country=item.get("country"),
|
||||
city=item.get("city"),
|
||||
latitude=str(item.get("latitude", ""))
|
||||
if item.get("latitude") is not None
|
||||
else None,
|
||||
longitude=str(item.get("longitude", ""))
|
||||
if item.get("longitude") is not None
|
||||
else None,
|
||||
value=item.get("value"),
|
||||
unit=item.get("unit"),
|
||||
extra_data=item.get("metadata", {}),
|
||||
extra_data=extra_data,
|
||||
collected_at=collected_at,
|
||||
reference_date=datetime.fromisoformat(
|
||||
item.get("reference_date").replace("Z", "+00:00")
|
||||
)
|
||||
if item.get("reference_date")
|
||||
else None,
|
||||
reference_date=reference_date,
|
||||
is_valid=1,
|
||||
is_current=True,
|
||||
previous_record_id=previous_record.id if previous_record else None,
|
||||
deleted_at=None,
|
||||
)
|
||||
|
||||
if previous_record is None:
|
||||
record.change_type = "created"
|
||||
record.change_summary = {}
|
||||
created_count += 1
|
||||
else:
|
||||
previous_payload = self._build_comparable_payload(previous_record)
|
||||
current_payload = self._build_comparable_payload(record)
|
||||
if current_payload == previous_payload:
|
||||
record.change_type = "unchanged"
|
||||
record.change_summary = {}
|
||||
unchanged_count += 1
|
||||
else:
|
||||
changed_fields = [
|
||||
key for key in current_payload.keys() if current_payload[key] != previous_payload.get(key)
|
||||
]
|
||||
record.change_type = "updated"
|
||||
record.change_summary = {"changed_fields": changed_fields}
|
||||
updated_count += 1
|
||||
|
||||
db.add(record)
|
||||
seen_entity_keys.add(entity_key)
|
||||
records_added += 1
|
||||
|
||||
if i % 100 == 0:
|
||||
self.update_progress(i + 1)
|
||||
await db.commit()
|
||||
await self.update_progress(i + 1, commit=True)
|
||||
|
||||
if snapshot_id is not None:
|
||||
deleted_keys = previous_current_keys - seen_entity_keys
|
||||
await db.execute(
|
||||
text(
|
||||
"""
|
||||
UPDATE collected_data
|
||||
SET is_current = FALSE
|
||||
WHERE source = :source
|
||||
AND snapshot_id IS DISTINCT FROM :snapshot_id
|
||||
AND COALESCE(is_current, TRUE) = TRUE
|
||||
"""
|
||||
),
|
||||
{"source": self.name, "snapshot_id": snapshot_id},
|
||||
)
|
||||
snapshot = await db.get(DataSnapshot, snapshot_id)
|
||||
if snapshot:
|
||||
snapshot.record_count = records_added
|
||||
snapshot.status = "success"
|
||||
snapshot.completed_at = datetime.now(UTC)
|
||||
snapshot.summary = {
|
||||
"created": created_count,
|
||||
"updated": updated_count,
|
||||
"unchanged": unchanged_count,
|
||||
"deleted": len(deleted_keys),
|
||||
}
|
||||
|
||||
await db.commit()
|
||||
self.update_progress(len(data))
|
||||
await self.update_progress(len(data), force=True)
|
||||
return records_added
|
||||
|
||||
async def save(self, db: AsyncSession, data: List[Dict[str, Any]]) -> int:
|
||||
@@ -200,8 +452,8 @@ async def log_task(
|
||||
status=status,
|
||||
records_processed=records_processed,
|
||||
error_message=error_message,
|
||||
started_at=datetime.utcnow(),
|
||||
completed_at=datetime.utcnow(),
|
||||
started_at=datetime.now(UTC),
|
||||
completed_at=datetime.now(UTC),
|
||||
)
|
||||
db.add(task)
|
||||
await db.commit()
|
||||
|
||||
313
backend/app/services/collectors/bgp_common.py
Normal file
@@ -0,0 +1,313 @@
|
||||
"""Shared helpers for BGP collectors."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import ipaddress
|
||||
from collections import Counter, defaultdict
|
||||
from datetime import UTC, datetime
|
||||
from typing import Any
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.models.bgp_anomaly import BGPAnomaly
|
||||
from app.models.collected_data import CollectedData
|
||||
|
||||
|
||||
# Approximate geographic coordinates for RIPE RIS route collectors (rrcNN),
# used to geotag BGP events with the location of the observing collector.
# NOTE(review): the list is intentionally partial (e.g. rrc02/rrc08/rrc09
# are absent); lookups fall back to an empty dict for unknown collectors.
RIPE_RIS_COLLECTOR_COORDS: dict[str, dict[str, Any]] = {
    "rrc00": {"city": "Amsterdam", "country": "Netherlands", "latitude": 52.3676, "longitude": 4.9041},
    "rrc01": {"city": "London", "country": "United Kingdom", "latitude": 51.5072, "longitude": -0.1276},
    "rrc03": {"city": "Amsterdam", "country": "Netherlands", "latitude": 52.3676, "longitude": 4.9041},
    "rrc04": {"city": "Geneva", "country": "Switzerland", "latitude": 46.2044, "longitude": 6.1432},
    "rrc05": {"city": "Vienna", "country": "Austria", "latitude": 48.2082, "longitude": 16.3738},
    "rrc06": {"city": "Otemachi", "country": "Japan", "latitude": 35.686, "longitude": 139.7671},
    "rrc07": {"city": "Stockholm", "country": "Sweden", "latitude": 59.3293, "longitude": 18.0686},
    "rrc10": {"city": "Milan", "country": "Italy", "latitude": 45.4642, "longitude": 9.19},
    "rrc11": {"city": "New York", "country": "United States", "latitude": 40.7128, "longitude": -74.006},
    "rrc12": {"city": "Frankfurt", "country": "Germany", "latitude": 50.1109, "longitude": 8.6821},
    "rrc13": {"city": "Moscow", "country": "Russia", "latitude": 55.7558, "longitude": 37.6173},
    "rrc14": {"city": "Palo Alto", "country": "United States", "latitude": 37.4419, "longitude": -122.143},
    "rrc15": {"city": "Sao Paulo", "country": "Brazil", "latitude": -23.5558, "longitude": -46.6396},
    "rrc16": {"city": "Miami", "country": "United States", "latitude": 25.7617, "longitude": -80.1918},
    "rrc18": {"city": "Barcelona", "country": "Spain", "latitude": 41.3874, "longitude": 2.1686},
    "rrc19": {"city": "Johannesburg", "country": "South Africa", "latitude": -26.2041, "longitude": 28.0473},
    "rrc20": {"city": "Zurich", "country": "Switzerland", "latitude": 47.3769, "longitude": 8.5417},
    "rrc21": {"city": "Paris", "country": "France", "latitude": 48.8566, "longitude": 2.3522},
    "rrc22": {"city": "Bucharest", "country": "Romania", "latitude": 44.4268, "longitude": 26.1025},
    "rrc23": {"city": "Singapore", "country": "Singapore", "latitude": 1.3521, "longitude": 103.8198},
    "rrc24": {"city": "Montevideo", "country": "Uruguay", "latitude": -34.9011, "longitude": -56.1645},
    "rrc25": {"city": "Amsterdam", "country": "Netherlands", "latitude": 52.3676, "longitude": 4.9041},
    "rrc26": {"city": "Dubai", "country": "United Arab Emirates", "latitude": 25.2048, "longitude": 55.2708},
}
|
||||
|
||||
|
||||
def _safe_int(value: Any) -> int | None:
|
||||
try:
|
||||
if value in (None, ""):
|
||||
return None
|
||||
return int(value)
|
||||
except (TypeError, ValueError):
|
||||
return None
|
||||
|
||||
|
||||
def _parse_timestamp(value: Any) -> datetime:
|
||||
if isinstance(value, datetime):
|
||||
return value.astimezone(UTC) if value.tzinfo else value.replace(tzinfo=UTC)
|
||||
|
||||
if isinstance(value, (int, float)):
|
||||
return datetime.fromtimestamp(value, tz=UTC)
|
||||
|
||||
if isinstance(value, str) and value:
|
||||
normalized = value.replace("Z", "+00:00")
|
||||
parsed = datetime.fromisoformat(normalized)
|
||||
return parsed.astimezone(UTC) if parsed.tzinfo else parsed.replace(tzinfo=UTC)
|
||||
|
||||
return datetime.now(UTC)
|
||||
|
||||
|
||||
def _normalize_as_path(raw_path: Any) -> list[int]:
|
||||
if raw_path in (None, ""):
|
||||
return []
|
||||
if isinstance(raw_path, list):
|
||||
return [asn for asn in (_safe_int(item) for item in raw_path) if asn is not None]
|
||||
if isinstance(raw_path, str):
|
||||
parts = raw_path.replace("{", "").replace("}", "").split()
|
||||
return [asn for asn in (_safe_int(item) for item in parts) if asn is not None]
|
||||
return []
|
||||
|
||||
|
||||
def normalize_bgp_event(payload: dict[str, Any], *, project: str) -> dict[str, Any]:
    """Normalize a raw BGP update message into the collector's common record shape.

    Tolerates field-name variants from different feeds (top-level keys or
    nested under ``attrs``), canonicalizes the event type, derives a stable
    hash-based ``source_id``, and geotags the record with the observing
    collector's coordinates from ``RIPE_RIS_COLLECTOR_COORDS``.
    """
    raw_message = payload.get("raw_message", payload)
    # AS path may live at the top level or under "attrs", as a list or string.
    raw_path = (
        payload.get("path")
        or payload.get("as_path")
        or payload.get("attrs", {}).get("path")
        or payload.get("attrs", {}).get("as_path")
        or []
    )
    as_path = _normalize_as_path(raw_path)

    # Collapse single-letter / spelled-out type variants onto three canonical
    # values; unknown non-empty types pass through, empty defaults to announcement.
    raw_type = str(payload.get("event_type") or payload.get("type") or payload.get("msg_type") or "").lower()
    if raw_type in {"a", "announce", "announcement"}:
        event_type = "announcement"
    elif raw_type in {"w", "withdraw", "withdrawal"}:
        event_type = "withdrawal"
    elif raw_type in {"r", "rib"}:
        event_type = "rib"
    else:
        event_type = raw_type or "announcement"

    prefix = str(payload.get("prefix") or payload.get("prefixes") or payload.get("target_prefix") or "").strip()
    # Strip one bracket pair left over from a list rendered as a string.
    if prefix.startswith("[") and prefix.endswith("]"):
        prefix = prefix[1:-1]

    timestamp = _parse_timestamp(payload.get("timestamp") or payload.get("time") or payload.get("ts"))
    collector = str(payload.get("collector") or payload.get("host") or payload.get("router") or "unknown")
    peer_asn = _safe_int(payload.get("peer_asn") or payload.get("peer"))
    # Fall back to the last hop of the AS path when origin is not explicit.
    # NOTE(review): `or` also discards an explicit origin of 0 (AS0 is
    # reserved, so this is likely intended) — confirm.
    origin_asn = _safe_int(payload.get("origin_asn")) or (as_path[-1] if as_path else None)
    # Deterministic id: the same observed event always hashes to the same
    # source_id, which lets downstream dedup work across restarts.
    source_material = "|".join(
        [
            collector,
            str(peer_asn or ""),
            prefix,
            event_type,
            timestamp.isoformat(),
            ",".join(str(asn) for asn in as_path),
        ]
    )
    source_id = hashlib.sha1(source_material.encode("utf-8")).hexdigest()[:24]

    prefix_length = None
    is_more_specific = False
    if prefix:
        try:
            network = ipaddress.ip_network(prefix, strict=False)
            prefix_length = int(network.prefixlen)
            # Longer than /24 (IPv4) or /48 (IPv6) counts as more-specific.
            is_more_specific = prefix_length > (24 if network.version == 4 else 48)
        except ValueError:
            # Unparseable prefix: leave length unknown, not more-specific.
            prefix_length = None

    # Unknown collectors geocode to an empty dict (no lat/lon on the record).
    collector_location = RIPE_RIS_COLLECTOR_COORDS.get(collector, {})
    metadata = {
        "project": project,
        "collector": collector,
        "peer_asn": peer_asn,
        "peer_ip": payload.get("peer_ip") or payload.get("peer_address"),
        "event_type": event_type,
        "prefix": prefix,
        "origin_asn": origin_asn,
        "as_path": as_path,
        "communities": payload.get("communities") or payload.get("attrs", {}).get("communities") or [],
        "next_hop": payload.get("next_hop") or payload.get("attrs", {}).get("next_hop"),
        "med": payload.get("med") or payload.get("attrs", {}).get("med"),
        "local_pref": payload.get("local_pref") or payload.get("attrs", {}).get("local_pref"),
        "timestamp": timestamp.isoformat(),
        "as_path_length": len(as_path),
        "prefix_length": prefix_length,
        "is_more_specific": is_more_specific,
        "visibility_weight": 1,
        "collector_location": collector_location,
        "raw_message": raw_message,
    }

    return {
        "source_id": source_id,
        "name": prefix or f"{collector}:{event_type}",
        "title": f"{event_type} {prefix}".strip(),
        "description": f"{collector} observed {event_type} for {prefix}".strip(),
        "reference_date": timestamp.isoformat(),
        "country": collector_location.get("country"),
        "city": collector_location.get("city"),
        "latitude": collector_location.get("latitude"),
        "longitude": collector_location.get("longitude"),
        "metadata": metadata,
    }
|
||||
|
||||
|
||||
async def create_bgp_anomalies_for_batch(
    db: AsyncSession,
    *,
    source: str,
    snapshot_id: int | None,
    task_id: int | None,
    events: list[dict[str, Any]],
) -> int:
    """Derive and persist BGPAnomaly rows from one ingest batch.

    Applies three heuristics over the batch:
      1. origin_change     — a prefix now originated by an AS outside the
                             origins seen in earlier snapshots (critical);
      2. more_specific_burst — two or more more-specific announcements
                             clustered under the same root address (high);
      3. mass_withdrawal   — three or more withdrawals for one
                             (prefix, origin) pair (high/critical by volume).

    Anomalies whose ``entity_key`` already exists are skipped, making the
    call idempotent per batch. Commits only when something was created.
    Returns the number of newly created anomaly rows.
    """
    if not events:
        return 0

    pending_anomalies: list[BGPAnomaly] = []
    prefix_to_origins: defaultdict[str, set[int]] = defaultdict(set)
    prefix_to_more_specifics: defaultdict[str, list[dict[str, Any]]] = defaultdict(list)
    withdrawal_counter: Counter[tuple[str, int | None]] = Counter()

    prefixes = {event["metadata"].get("prefix") for event in events if event.get("metadata", {}).get("prefix")}
    previous_origin_map: dict[str, set[int]] = defaultdict(set)

    # Baseline: origins seen for these prefixes in earlier snapshots.
    # NOTE(review): when snapshot_id is None the SQL comparison becomes
    # `!= NULL`, which matches no rows, so the baseline is empty — confirm
    # that is the intended behavior for snapshot-less runs.
    if prefixes:
        previous_query = await db.execute(
            select(CollectedData).where(
                CollectedData.source == source,
                CollectedData.snapshot_id != snapshot_id,
                CollectedData.extra_data["prefix"].as_string().in_(sorted(prefixes)),
            )
        )
        for record in previous_query.scalars().all():
            metadata = record.extra_data or {}
            prefix = metadata.get("prefix")
            origin = _safe_int(metadata.get("origin_asn"))
            if prefix and origin is not None:
                previous_origin_map[prefix].add(origin)

    # Single pass over the batch to bucket evidence for all three heuristics.
    for event in events:
        metadata = event.get("metadata", {})
        prefix = metadata.get("prefix")
        origin_asn = _safe_int(metadata.get("origin_asn"))
        if not prefix:
            continue

        if origin_asn is not None:
            prefix_to_origins[prefix].add(origin_asn)

        # Cluster more-specifics by network address only (mask dropped).
        if metadata.get("is_more_specific"):
            prefix_to_more_specifics[prefix.split("/")[0]].append(event)

        if metadata.get("event_type") == "withdrawal":
            withdrawal_counter[(prefix, origin_asn)] += 1

    # Heuristic 1: origins outside the historical baseline (needs a baseline).
    for prefix, origins in prefix_to_origins.items():
        historic = previous_origin_map.get(prefix, set())
        new_origins = sorted(origin for origin in origins if origin not in historic)
        if historic and new_origins:
            for new_origin in new_origins:
                pending_anomalies.append(
                    BGPAnomaly(
                        snapshot_id=snapshot_id,
                        task_id=task_id,
                        source=source,
                        anomaly_type="origin_change",
                        severity="critical",
                        status="active",
                        entity_key=f"origin_change:{prefix}:{new_origin}",
                        prefix=prefix,
                        # Representative historic origin: the smallest ASN.
                        origin_asn=sorted(historic)[0],
                        new_origin_asn=new_origin,
                        peer_scope=[],
                        started_at=datetime.now(UTC),
                        confidence=0.86,
                        summary=f"Prefix {prefix} is now originated by AS{new_origin}, outside the current baseline.",
                        evidence={"previous_origins": sorted(historic), "current_origins": sorted(origins)},
                    )
                )

    # Heuristic 2: bursts of more-specific announcements under one root.
    for root_prefix, more_specifics in prefix_to_more_specifics.items():
        if len(more_specifics) >= 2:
            sample = more_specifics[0]["metadata"]
            pending_anomalies.append(
                BGPAnomaly(
                    snapshot_id=snapshot_id,
                    task_id=task_id,
                    source=source,
                    anomaly_type="more_specific_burst",
                    severity="high",
                    status="active",
                    entity_key=f"more_specific_burst:{root_prefix}:{len(more_specifics)}",
                    prefix=sample.get("prefix"),
                    origin_asn=_safe_int(sample.get("origin_asn")),
                    new_origin_asn=None,
                    # Deduplicated, sorted set of collectors that saw the burst.
                    peer_scope=sorted(
                        {
                            str(item.get("metadata", {}).get("collector") or "")
                            for item in more_specifics
                            if item.get("metadata", {}).get("collector")
                        }
                    ),
                    started_at=datetime.now(UTC),
                    confidence=0.72,
                    summary=f"{len(more_specifics)} more-specific announcements clustered around {root_prefix}.",
                    # Cap the attached evidence at 10 sample events.
                    evidence={"events": [item.get("metadata") for item in more_specifics[:10]]},
                )
            )

    # Heuristic 3: repeated withdrawals for the same (prefix, origin) pair.
    for (prefix, origin_asn), count in withdrawal_counter.items():
        if count >= 3:
            pending_anomalies.append(
                BGPAnomaly(
                    snapshot_id=snapshot_id,
                    task_id=task_id,
                    source=source,
                    anomaly_type="mass_withdrawal",
                    severity="high" if count < 8 else "critical",
                    status="active",
                    entity_key=f"mass_withdrawal:{prefix}:{origin_asn}:{count}",
                    prefix=prefix,
                    origin_asn=origin_asn,
                    new_origin_asn=None,
                    peer_scope=[],
                    started_at=datetime.now(UTC),
                    # Confidence scales with volume, clamped to 0.95.
                    confidence=min(0.55 + (count * 0.05), 0.95),
                    summary=f"{count} withdrawal events observed for {prefix} in the current ingest window.",
                    evidence={"withdrawal_count": count},
                )
            )

    if not pending_anomalies:
        return 0

    # Skip anomalies whose entity_key is already stored (idempotence).
    existing_result = await db.execute(
        select(BGPAnomaly.entity_key).where(
            BGPAnomaly.entity_key.in_([item.entity_key for item in pending_anomalies])
        )
    )
    existing_keys = {row[0] for row in existing_result.fetchall()}

    created = 0
    for anomaly in pending_anomalies:
        if anomaly.entity_key in existing_keys:
            continue
        db.add(anomaly)
        created += 1

    if created:
        await db.commit()
    return created
|
||||
120
backend/app/services/collectors/bgpstream.py
Normal file
@@ -0,0 +1,120 @@
|
||||
"""BGPStream backfill collector."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
import time
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
from typing import Any
|
||||
|
||||
from app.services.collectors.base import BaseCollector
|
||||
from app.services.collectors.bgp_common import create_bgp_anomalies_for_batch, normalize_bgp_event
|
||||
|
||||
|
||||
class BGPStreamBackfillCollector(BaseCollector):
    """Historical BGP backfill collector fed by a BGPStream broker endpoint."""

    name = "bgpstream_bgp"
    priority = "P1"
    module = "L3"
    frequency_hours = 6
    data_type = "bgp_rib"
    fail_on_empty = True

    async def fetch(self) -> list[dict[str, Any]]:
        """Fetch broker resource windows without blocking the event loop.

        Raises:
            RuntimeError: when no broker URL has been resolved for this collector.
        """
        if not self._resolved_url:
            raise RuntimeError("BGPStream URL is not configured")

        # urllib performs blocking I/O, so run the request on a worker thread.
        return await asyncio.to_thread(self._fetch_resource_windows)

    def _fetch_resource_windows(self) -> list[dict[str, Any]]:
        """Query the broker for RouteViews update files covering the last 24h.

        The window ends one hour ago to avoid racing files still being written.

        Raises:
            RuntimeError: when the broker reports an error payload.
        """
        window_end = int(time.time()) - 3600
        window_start = window_end - 86400
        query = urllib.parse.urlencode(
            [
                ("projects[]", "routeviews"),
                ("collectors[]", "route-views2"),
                ("types[]", "updates"),
                ("intervals[]", f"{window_start},{window_end}"),
            ]
        )
        request = urllib.request.Request(
            f"{self._resolved_url}/data?{query}",
            headers={"User-Agent": "Planet-Intelligence-System/1.0 (Python/collector)"},
        )
        with urllib.request.urlopen(request, timeout=30) as response:
            body = json.loads(response.read().decode())

        if body.get("error"):
            raise RuntimeError(f"BGPStream broker error: {body['error']}")

        return body.get("data", {}).get("resources", [])

    def transform(self, raw_data: list[dict[str, Any]]) -> list[dict[str, Any]]:
        """Normalize broker records into BGP events.

        Records that already carry decoded event fields pass straight through
        ``normalize_bgp_event``; broker "file window" records are wrapped into a
        synthetic rib event (see :meth:`_window_event`). The resulting batch is
        also stashed on ``self._latest_transformed_batch`` for ``run()``.
        """
        transformed: list[dict[str, Any]] = []
        window_markers = ("filename", "url", "startTime", "start_time")
        for record in raw_data:
            if not isinstance(record, dict):
                continue

            looks_like_window = any(marker in record for marker in window_markers)
            has_event_fields = {"collector", "prefix"} <= set(record.keys())
            if has_event_fields and not looks_like_window:
                transformed.append(normalize_bgp_event(record, project="bgpstream"))
            else:
                # Broker responses provide file windows rather than decoded events.
                transformed.append(self._window_event(record))

        self._latest_transformed_batch = transformed
        return transformed

    def _window_event(self, record: dict[str, Any]) -> dict[str, Any]:
        """Build a synthetic rib event for a broker file-window record."""
        collector = record.get("collector") or record.get("project") or "bgpstream"
        window_name = record.get("filename") or record.get("url") or f"{collector}-window"
        normalized = normalize_bgp_event(
            {
                "collector": collector,
                "event_type": "rib",
                "prefix": record.get("prefix") or "historical-window",
                "timestamp": record.get("time") or record.get("startTime") or record.get("start_time"),
                "origin_asn": record.get("origin_asn"),
                "path": record.get("path") or [],
                "raw_message": record,
            },
            project="bgpstream",
        )
        return {
            **normalized,
            "name": window_name,
            "title": f"BGPStream {collector}",
            "description": "Historical BGPStream backfill window",
            "metadata": {
                **normalized["metadata"],
                "broker_record": record,
            },
        }

    async def run(self, db):
        """Run the base collection, then derive anomalies from the fresh batch."""
        result = await super().run(db)
        if result.get("status") != "success":
            return result

        task_id = result.get("task_id")
        snapshot_id = await self._resolve_snapshot_id(db, task_id)
        result["anomalies_created"] = await create_bgp_anomalies_for_batch(
            db,
            source=self.name,
            snapshot_id=snapshot_id,
            task_id=task_id,
            events=getattr(self, "_latest_transformed_batch", []),
        )
        return result

    async def _resolve_snapshot_id(self, db, task_id: int | None) -> int | None:
        """Return the newest snapshot id recorded for ``task_id``, if any."""
        if task_id is None:
            return None
        from sqlalchemy import select

        from app.models.data_snapshot import DataSnapshot

        query = (
            select(DataSnapshot.id)
            .where(DataSnapshot.task_id == task_id)
            .order_by(DataSnapshot.id.desc())
        )
        return (await db.execute(query)).scalar_one_or_none()
|
||||
115
backend/app/services/collectors/celestrak.py
Normal file
@@ -0,0 +1,115 @@
|
||||
"""CelesTrak TLE Collector
|
||||
|
||||
Collects satellite TLE (Two-Line Element) data from CelesTrak.org.
|
||||
Free, no authentication required.
|
||||
"""
|
||||
|
||||
import json
|
||||
from typing import Dict, Any, List
|
||||
import httpx
|
||||
|
||||
from app.core.satellite_tle import build_tle_lines_from_elements
|
||||
from app.services.collectors.base import BaseCollector
|
||||
|
||||
|
||||
class CelesTrakTLECollector(BaseCollector):
    """Collects satellite GP/TLE element sets from CelesTrak (free, no auth)."""

    name = "celestrak_tle"
    priority = "P2"
    module = "L3"
    frequency_hours = 24
    data_type = "satellite_tle"

    @property
    def base_url(self) -> str:
        """Base endpoint for CelesTrak's GP (general perturbations) API."""
        return "https://celestrak.org/NORAD/elements/gp.php"

    async def fetch(self) -> List[Dict[str, Any]]:
        """Download GP records for a fixed set of satellite groups.

        Per-group failures are logged and skipped (deliberate best-effort);
        if every group fails, a small built-in sample is returned so the
        downstream pipeline still has data to process.

        Returns:
            Raw GP records (uppercase CelesTrak keys) for transform().
        """
        satellite_groups = [
            "starlink",
            "gps-ops",
            "galileo",
            "glonass",
            "beidou",
            "leo",
            "geo",
            "iridium-next",
        ]

        all_satellites: List[Dict[str, Any]] = []

        async with httpx.AsyncClient(timeout=120.0) as client:
            for group in satellite_groups:
                try:
                    # Reuse base_url so the endpoint is defined in one place.
                    url = f"{self.base_url}?GROUP={group}&FORMAT=json"
                    response = await client.get(url)

                    if response.status_code == 200:
                        data = response.json()
                        if isinstance(data, list):
                            all_satellites.extend(data)
                            print(f"CelesTrak: Fetched {len(data)} satellites from group '{group}'")
                except Exception as e:
                    # One bad group must not abort the whole run.
                    print(f"CelesTrak: Error fetching group '{group}': {e}")

        if not all_satellites:
            return self._get_sample_data()

        print(f"CelesTrak: Total satellites fetched: {len(all_satellites)}")

        # Return raw data - base.run() will call transform()
        return all_satellites

    def transform(self, raw_data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """Map CelesTrak GP records into the internal snapshot format.

        Builds a normalized TLE pair once on the backend; the source's own
        TLE lines (if present) take precedence in the stored metadata.
        """
        transformed = []
        for item in raw_data:
            tle_line1, tle_line2 = build_tle_lines_from_elements(
                norad_cat_id=item.get("NORAD_CAT_ID"),
                epoch=item.get("EPOCH"),
                inclination=item.get("INCLINATION"),
                raan=item.get("RA_OF_ASC_NODE"),
                eccentricity=item.get("ECCENTRICITY"),
                arg_of_perigee=item.get("ARG_OF_PERICENTER"),
                mean_anomaly=item.get("MEAN_ANOMALY"),
                mean_motion=item.get("MEAN_MOTION"),
            )

            transformed.append(
                {
                    "name": item.get("OBJECT_NAME", "Unknown"),
                    "reference_date": item.get("EPOCH", ""),
                    "metadata": {
                        "norad_cat_id": item.get("NORAD_CAT_ID"),
                        "international_designator": item.get("OBJECT_ID"),
                        "epoch": item.get("EPOCH"),
                        "mean_motion": item.get("MEAN_MOTION"),
                        "eccentricity": item.get("ECCENTRICITY"),
                        "inclination": item.get("INCLINATION"),
                        "raan": item.get("RA_OF_ASC_NODE"),
                        "arg_of_perigee": item.get("ARG_OF_PERICENTER"),
                        "mean_anomaly": item.get("MEAN_ANOMALY"),
                        "classification_type": item.get("CLASSIFICATION_TYPE"),
                        "bstar": item.get("BSTAR"),
                        "mean_motion_dot": item.get("MEAN_MOTION_DOT"),
                        "mean_motion_ddot": item.get("MEAN_MOTION_DDOT"),
                        "ephemeris_type": item.get("EPHEMERIS_TYPE"),
                        # Prefer the original TLE lines when the source provides them.
                        # If they are missing, store a normalized TLE pair built once on the backend.
                        "tle_line1": item.get("TLE_LINE1") or tle_line1,
                        "tle_line2": item.get("TLE_LINE2") or tle_line2,
                    },
                }
            )
        return transformed

    def _get_sample_data(self) -> List[Dict[str, Any]]:
        """Return fallback sample records in CelesTrak's uppercase GP schema.

        Fix: the previous samples used lowercase keys ("name", "norad_cat_id",
        ...), which transform() cannot read — every sample came out as
        "Unknown" with empty metadata. Keys now match what transform() expects.
        """
        return [
            {
                "OBJECT_NAME": "STARLINK-1000",
                "OBJECT_ID": "2019-029AZ",
                "NORAD_CAT_ID": 44720,
                "EPOCH": "2026-03-13T00:00:00Z",
                "MEAN_MOTION": 15.79234567,
                "ECCENTRICITY": 0.0001234,
                "INCLINATION": 53.0,
            },
        ]
|
||||
@@ -10,7 +10,7 @@ Some endpoints require authentication for higher rate limits.
|
||||
import asyncio
|
||||
import os
|
||||
from typing import Dict, Any, List
|
||||
from datetime import datetime
|
||||
from datetime import UTC, datetime
|
||||
|
||||
import httpx
|
||||
from app.services.collectors.base import HTTPCollector
|
||||
@@ -59,7 +59,7 @@ class CloudflareRadarDeviceCollector(HTTPCollector):
|
||||
"other_percent": float(summary.get("other", 0)),
|
||||
"date_range": result.get("meta", {}).get("dateRange", {}),
|
||||
},
|
||||
"reference_date": datetime.utcnow().isoformat(),
|
||||
"reference_date": datetime.now(UTC).isoformat(),
|
||||
}
|
||||
data.append(entry)
|
||||
except (ValueError, TypeError, KeyError):
|
||||
@@ -107,7 +107,7 @@ class CloudflareRadarTrafficCollector(HTTPCollector):
|
||||
"requests": item.get("requests"),
|
||||
"visit_duration": item.get("visitDuration"),
|
||||
},
|
||||
"reference_date": item.get("datetime", datetime.utcnow().isoformat()),
|
||||
"reference_date": item.get("datetime", datetime.now(UTC).isoformat()),
|
||||
}
|
||||
data.append(entry)
|
||||
except (ValueError, TypeError, KeyError):
|
||||
@@ -155,7 +155,7 @@ class CloudflareRadarTopASCollector(HTTPCollector):
|
||||
"traffic_share": item.get("trafficShare"),
|
||||
"country_code": item.get("location", {}).get("countryCode"),
|
||||
},
|
||||
"reference_date": datetime.utcnow().isoformat(),
|
||||
"reference_date": datetime.now(UTC).isoformat(),
|
||||
}
|
||||
data.append(entry)
|
||||
except (ValueError, TypeError, KeyError):
|
||||
|
||||
@@ -6,7 +6,7 @@ https://epoch.ai/data/gpu-clusters
|
||||
|
||||
import re
|
||||
from typing import Dict, Any, List
|
||||
from datetime import datetime
|
||||
from datetime import UTC, datetime
|
||||
from bs4 import BeautifulSoup
|
||||
import httpx
|
||||
|
||||
@@ -64,7 +64,7 @@ class EpochAIGPUCollector(BaseCollector):
|
||||
"metadata": {
|
||||
"raw_data": perf_cell,
|
||||
},
|
||||
"reference_date": datetime.utcnow().strftime("%Y-%m-%d"),
|
||||
"reference_date": datetime.now(UTC).strftime("%Y-%m-%d"),
|
||||
}
|
||||
data.append(entry)
|
||||
except (ValueError, IndexError, AttributeError):
|
||||
@@ -114,6 +114,6 @@ class EpochAIGPUCollector(BaseCollector):
|
||||
"metadata": {
|
||||
"note": "Sample data - Epoch AI page structure may vary",
|
||||
},
|
||||
"reference_date": datetime.utcnow().strftime("%Y-%m-%d"),
|
||||
"reference_date": datetime.now(UTC).strftime("%Y-%m-%d"),
|
||||
},
|
||||
]
|
||||
|
||||
@@ -4,7 +4,7 @@ Collects landing point data from FAO CSV API.
|
||||
"""
|
||||
|
||||
from typing import Dict, Any, List
|
||||
from datetime import datetime
|
||||
from datetime import UTC, datetime
|
||||
import httpx
|
||||
|
||||
from app.services.collectors.base import BaseCollector
|
||||
@@ -58,7 +58,7 @@ class FAOLandingPointCollector(BaseCollector):
|
||||
"is_tbd": is_tbd,
|
||||
"original_id": feature_id,
|
||||
},
|
||||
"reference_date": datetime.utcnow().strftime("%Y-%m-%d"),
|
||||
"reference_date": datetime.now(UTC).strftime("%Y-%m-%d"),
|
||||
}
|
||||
result.append(entry)
|
||||
except (ValueError, IndexError):
|
||||
|
||||
@@ -7,7 +7,7 @@ https://huggingface.co/spaces
|
||||
"""
|
||||
|
||||
from typing import Dict, Any, List
|
||||
from datetime import datetime
|
||||
from datetime import UTC, datetime
|
||||
|
||||
from app.services.collectors.base import HTTPCollector
|
||||
|
||||
@@ -46,7 +46,7 @@ class HuggingFaceModelCollector(HTTPCollector):
|
||||
"library_name": item.get("library_name"),
|
||||
"created_at": item.get("createdAt"),
|
||||
},
|
||||
"reference_date": datetime.utcnow().strftime("%Y-%m-%d"),
|
||||
"reference_date": datetime.now(UTC).strftime("%Y-%m-%d"),
|
||||
}
|
||||
data.append(entry)
|
||||
except (ValueError, TypeError, KeyError):
|
||||
@@ -87,7 +87,7 @@ class HuggingFaceDatasetCollector(HTTPCollector):
|
||||
"tags": (item.get("tags", []) or [])[:10],
|
||||
"created_at": item.get("createdAt"),
|
||||
},
|
||||
"reference_date": datetime.utcnow().strftime("%Y-%m-%d"),
|
||||
"reference_date": datetime.now(UTC).strftime("%Y-%m-%d"),
|
||||
}
|
||||
data.append(entry)
|
||||
except (ValueError, TypeError, KeyError):
|
||||
@@ -128,7 +128,7 @@ class HuggingFaceSpacesCollector(HTTPCollector):
|
||||
"tags": (item.get("tags", []) or [])[:10],
|
||||
"created_at": item.get("createdAt"),
|
||||
},
|
||||
"reference_date": datetime.utcnow().strftime("%Y-%m-%d"),
|
||||
"reference_date": datetime.now(UTC).strftime("%Y-%m-%d"),
|
||||
}
|
||||
data.append(entry)
|
||||
except (ValueError, TypeError, KeyError):
|
||||
|
||||
@@ -13,7 +13,7 @@ To get higher limits, set PEERINGDB_API_KEY environment variable.
|
||||
import asyncio
|
||||
import os
|
||||
from typing import Dict, Any, List
|
||||
from datetime import datetime
|
||||
from datetime import UTC, datetime
|
||||
|
||||
import httpx
|
||||
from app.services.collectors.base import HTTPCollector
|
||||
@@ -76,7 +76,7 @@ class PeeringDBIXPCollector(HTTPCollector):
|
||||
print(f"Warning: PeeringDB collection failed after {max_retries} retries: {last_error}")
|
||||
return {}
|
||||
|
||||
async def collect(self) -> List[Dict[str, Any]]:
|
||||
async def fetch(self) -> List[Dict[str, Any]]:
|
||||
"""Collect IXP data from PeeringDB with rate limit handling"""
|
||||
response_data = await self.fetch_with_retry()
|
||||
if not response_data:
|
||||
@@ -106,7 +106,7 @@ class PeeringDBIXPCollector(HTTPCollector):
|
||||
"created": item.get("created"),
|
||||
"updated": item.get("updated"),
|
||||
},
|
||||
"reference_date": datetime.utcnow().isoformat(),
|
||||
"reference_date": datetime.now(UTC).isoformat(),
|
||||
}
|
||||
data.append(entry)
|
||||
except (ValueError, TypeError, KeyError):
|
||||
@@ -177,7 +177,7 @@ class PeeringDBNetworkCollector(HTTPCollector):
|
||||
print(f"Warning: PeeringDB collection failed after {max_retries} retries: {last_error}")
|
||||
return {}
|
||||
|
||||
async def collect(self) -> List[Dict[str, Any]]:
|
||||
async def fetch(self) -> List[Dict[str, Any]]:
|
||||
"""Collect Network data from PeeringDB with rate limit handling"""
|
||||
response_data = await self.fetch_with_retry()
|
||||
if not response_data:
|
||||
@@ -209,7 +209,7 @@ class PeeringDBNetworkCollector(HTTPCollector):
|
||||
"created": item.get("created"),
|
||||
"updated": item.get("updated"),
|
||||
},
|
||||
"reference_date": datetime.utcnow().isoformat(),
|
||||
"reference_date": datetime.now(UTC).isoformat(),
|
||||
}
|
||||
data.append(entry)
|
||||
except (ValueError, TypeError, KeyError):
|
||||
@@ -280,7 +280,7 @@ class PeeringDBFacilityCollector(HTTPCollector):
|
||||
print(f"Warning: PeeringDB collection failed after {max_retries} retries: {last_error}")
|
||||
return {}
|
||||
|
||||
async def collect(self) -> List[Dict[str, Any]]:
|
||||
async def fetch(self) -> List[Dict[str, Any]]:
|
||||
"""Collect Facility data from PeeringDB with rate limit handling"""
|
||||
response_data = await self.fetch_with_retry()
|
||||
if not response_data:
|
||||
@@ -311,7 +311,7 @@ class PeeringDBFacilityCollector(HTTPCollector):
|
||||
"created": item.get("created"),
|
||||
"updated": item.get("updated"),
|
||||
},
|
||||
"reference_date": datetime.utcnow().isoformat(),
|
||||
"reference_date": datetime.now(UTC).isoformat(),
|
||||
}
|
||||
data.append(entry)
|
||||
except (ValueError, TypeError, KeyError):
|
||||
|
||||
131
backend/app/services/collectors/ris_live.py
Normal file
@@ -0,0 +1,131 @@
|
||||
"""RIPE RIS Live collector."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
import urllib.request
|
||||
from typing import Any
|
||||
|
||||
from app.services.collectors.base import BaseCollector
|
||||
from app.services.collectors.bgp_common import create_bgp_anomalies_for_batch, normalize_bgp_event
|
||||
|
||||
|
||||
class RISLiveCollector(BaseCollector):
    """Near-real-time BGP update collector backed by the RIPE RIS Live stream."""

    name = "ris_live_bgp"
    priority = "P1"
    module = "L3"
    frequency_hours = 1
    data_type = "bgp_update"
    fail_on_empty = True
    # Stop reading the stream once this many ris_message payloads are captured.
    max_messages = 100
    # NOTE(review): declared but never referenced below — the urlopen call uses
    # a hard-coded 20s timeout; confirm whether this was meant to be wired in.
    idle_timeout_seconds = 15

    async def fetch(self) -> list[dict[str, Any]]:
        """Sample a bounded batch of live BGP messages from RIS Live.

        Raises:
            RuntimeError: when the collector has no resolved URL configured.
        """
        if not self._resolved_url:
            raise RuntimeError("RIS Live URL is not configured")

        # urllib performs blocking reads; keep the event loop free via a thread.
        return await asyncio.to_thread(self._fetch_via_stream)

    def _fetch_via_stream(self) -> list[dict[str, Any]]:
        """Read newline-delimited JSON from the RIS Live HTTP stream.

        Subscribes to UPDATE messages from collector rrc00 via the
        X-RIS-Subscribe header, collects up to ``max_messages`` payloads of
        type "ris_message", and returns their "data" dicts.
        """
        events: list[dict[str, Any]] = []
        stream_url = "https://ris-live.ripe.net/v1/stream/?format=json&client=planet-ris-live"
        subscribe = json.dumps(
            {
                "host": "rrc00",
                "type": "UPDATE",
                "require": "announcements",
            }
        )
        request = urllib.request.Request(
            stream_url,
            headers={"X-RIS-Subscribe": subscribe},
        )
        with urllib.request.urlopen(request, timeout=20) as response:
            while len(events) < self.max_messages:
                line = response.readline().decode().strip()
                # An empty line is treated as end-of-stream.
                # NOTE(review): a keepalive/blank line would also end the read
                # early — confirm the stream never emits blank lines mid-flow.
                if not line:
                    break
                payload = json.loads(line)
                # Only "ris_message" envelopes carry BGP data; skip the rest.
                if payload.get("type") != "ris_message":
                    continue
                data = payload.get("data", {})
                if isinstance(data, dict):
                    events.append(data)
        return events

    def transform(self, raw_data: list[dict[str, Any]]) -> list[dict[str, Any]]:
        """Fan each RIS message out into per-prefix normalized BGP events.

        One event is emitted per announced prefix and per withdrawn prefix;
        messages with neither produce a single pass-through event. The batch
        is also stashed on ``self._latest_transformed_batch`` so ``run()``
        can feed it to the anomaly detector.
        """
        transformed: list[dict[str, Any]] = []
        for item in raw_data:
            announcements = item.get("announcements") or []
            withdrawals = item.get("withdrawals") or []

            for announcement in announcements:
                next_hop = announcement.get("next_hop")
                for prefix in announcement.get("prefixes") or []:
                    transformed.append(
                        normalize_bgp_event(
                            {
                                **item,
                                # "rrc00.ripe.net" -> "rrc00"
                                "collector": item.get("host", "").replace(".ripe.net", ""),
                                "event_type": "announcement",
                                "prefix": prefix,
                                "next_hop": next_hop,
                            },
                            project="ris-live",
                        )
                    )

            for prefix in withdrawals:
                transformed.append(
                    normalize_bgp_event(
                        {
                            **item,
                            "collector": item.get("host", "").replace(".ripe.net", ""),
                            "event_type": "withdrawal",
                            "prefix": prefix,
                        },
                        project="ris-live",
                    )
                )

            # Keep messages with no prefixes so nothing is silently dropped.
            if not announcements and not withdrawals:
                transformed.append(
                    normalize_bgp_event(
                        {
                            **item,
                            "collector": item.get("host", "").replace(".ripe.net", ""),
                        },
                        project="ris-live",
                    )
                )

        self._latest_transformed_batch = transformed
        return transformed

    async def run(self, db):
        """Run the base collection, then derive anomalies from the fresh batch."""
        result = await super().run(db)
        if result.get("status") != "success":
            return result

        snapshot_id = await self._resolve_snapshot_id(db, result.get("task_id"))
        anomaly_count = await create_bgp_anomalies_for_batch(
            db,
            source=self.name,
            snapshot_id=snapshot_id,
            task_id=result.get("task_id"),
            # transform() populates this; default to [] if it never ran.
            events=getattr(self, "_latest_transformed_batch", []),
        )
        result["anomalies_created"] = anomaly_count
        return result

    async def _resolve_snapshot_id(self, db, task_id: int | None) -> int | None:
        """Return the newest snapshot id recorded for ``task_id``, if any."""
        if task_id is None:
            return None
        # Imported locally to avoid a module-level dependency cycle.
        from sqlalchemy import select
        from app.models.data_snapshot import DataSnapshot

        result = await db.execute(
            select(DataSnapshot.id).where(DataSnapshot.task_id == task_id).order_by(DataSnapshot.id.desc())
        )
        return result.scalar_one_or_none()
|
||||
239
backend/app/services/collectors/spacetrack.py
Normal file
@@ -0,0 +1,239 @@
|
||||
"""Space-Track TLE Collector
|
||||
|
||||
Collects satellite TLE (Two-Line Element) data from Space-Track.org.
|
||||
API documentation: https://www.space-track.org/documentation
|
||||
"""
|
||||
|
||||
import json
|
||||
from typing import Dict, Any, List
|
||||
import httpx
|
||||
|
||||
from app.services.collectors.base import BaseCollector
|
||||
from app.core.data_sources import get_data_sources_config
|
||||
from app.core.satellite_tle import build_tle_lines_from_elements
|
||||
|
||||
|
||||
class SpaceTrackTLECollector(BaseCollector):
    """Collects satellite GP/TLE records from Space-Track.org.

    Requires SPACETRACK_USERNAME / SPACETRACK_PASSWORD in settings. On missing
    credentials, failed login/query, or any exception, a built-in sample set
    is returned so the pipeline can still run end-to-end.
    """

    name = "spacetrack_tle"
    priority = "P2"
    module = "L3"
    frequency_hours = 24
    data_type = "satellite_tle"

    @property
    def base_url(self) -> str:
        """Resolved collector URL, falling back to the YAML-configured one."""
        # Only load the config when we actually need the fallback URL.
        if self._resolved_url:
            return self._resolved_url
        return get_data_sources_config().get_yaml_url("spacetrack_tle")

    async def fetch(self) -> List[Dict[str, Any]]:
        """Authenticate against Space-Track and download GP (TLE) records.

        Fix: the original method contained a second, fully duplicated
        fetch implementation after the first try/except — unreachable because
        every path in the first attempt returns. The dead copy is removed.

        Returns:
            Raw GP records (uppercase Space-Track keys), or sample data on
            any failure.
        """
        from app.core.config import settings

        username = settings.SPACETRACK_USERNAME
        password = settings.SPACETRACK_PASSWORD

        if not username or not password:
            print("SPACETRACK: No credentials configured, using sample data")
            return self._get_sample_data()

        print(f"SPACETRACK: Attempting to fetch TLE data with username: {username}")

        try:
            async with httpx.AsyncClient(
                timeout=120.0,
                follow_redirects=True,
                headers={
                    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
                    "Accept": "application/json, text/html, */*",
                    "Accept-Language": "en-US,en;q=0.9",
                    "Referer": "https://www.space-track.org/",
                },
            ) as client:
                # Visit the landing page first to pick up any session cookies.
                await client.get("https://www.space-track.org/")

                login_response = await client.post(
                    "https://www.space-track.org/ajaxauth/login",
                    data={
                        "identity": username,
                        "password": password,
                    },
                )
                print(f"SPACETRACK: Login response status: {login_response.status_code}")
                print(f"SPACETRACK: Login response URL: {login_response.url}")

                if login_response.status_code == 403:
                    # Some deployments reject the browser-style headers above;
                    # retry once with a plain client before giving up.
                    print("SPACETRACK: Trying alternate login method...")

                    async with httpx.AsyncClient(
                        timeout=120.0,
                        follow_redirects=True,
                    ) as alt_client:
                        await alt_client.get("https://www.space-track.org/")

                        alt_login = await alt_client.post(
                            "https://www.space-track.org/ajaxauth/login",
                            data={
                                "identity": username,
                                "password": password,
                            },
                        )
                        print(f"SPACETRACK: Alt login status: {alt_login.status_code}")

                        if alt_login.status_code == 200:
                            tle_response = await alt_client.get(
                                "https://www.space-track.org/basicspacedata/query/class/gp/NORAD_CAT_ID/25544/format/json"
                            )
                            if tle_response.status_code == 200:
                                data = tle_response.json()
                                print(f"SPACETRACK: Received {len(data)} records via alt method")
                                return data

                if login_response.status_code != 200:
                    print("SPACETRACK: Login failed, using sample data")
                    return self._get_sample_data()

                # NOTE(review): this queries only NORAD_CAT_ID 25544 (the ISS).
                # The removed dead code queried orderby/EPOCH desc, limit 1000;
                # presumably a broader query is intended — confirm before widening.
                tle_response = await client.get(
                    "https://www.space-track.org/basicspacedata/query/class/gp/NORAD_CAT_ID/25544/format/json"
                )
                print(f"SPACETRACK: TLE query status: {tle_response.status_code}")

                if tle_response.status_code != 200:
                    print("SPACETRACK: Query failed, using sample data")
                    return self._get_sample_data()

                data = tle_response.json()
                print(f"SPACETRACK: Received {len(data)} records")
                return data
        except Exception as e:
            # Best-effort collector: never raise, always fall back to samples.
            print(f"SPACETRACK: Error - {e}, using sample data")
            return self._get_sample_data()

    def transform(self, raw_data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """Transform Space-Track GP records into the internal snapshot format.

        Builds a normalized TLE pair once on the backend; the source's own
        TLE lines (if present) take precedence in the stored metadata.
        """
        transformed = []
        for item in raw_data:
            tle_line1, tle_line2 = build_tle_lines_from_elements(
                norad_cat_id=item.get("NORAD_CAT_ID"),
                epoch=item.get("EPOCH"),
                inclination=item.get("INCLINATION"),
                raan=item.get("RAAN"),
                eccentricity=item.get("ECCENTRICITY"),
                arg_of_perigee=item.get("ARG_OF_PERIGEE"),
                mean_anomaly=item.get("MEAN_ANOMALY"),
                mean_motion=item.get("MEAN_MOTION"),
            )
            transformed.append(
                {
                    "name": item.get("OBJECT_NAME", "Unknown"),
                    "reference_date": item.get("EPOCH", ""),
                    "metadata": {
                        "norad_cat_id": item.get("NORAD_CAT_ID"),
                        "international_designator": item.get("INTL_DESIGNATOR"),
                        "epoch": item.get("EPOCH"),
                        "mean_motion": item.get("MEAN_MOTION"),
                        "eccentricity": item.get("ECCENTRICITY"),
                        "inclination": item.get("INCLINATION"),
                        "raan": item.get("RAAN"),
                        "arg_of_perigee": item.get("ARG_OF_PERIGEE"),
                        "mean_anomaly": item.get("MEAN_ANOMALY"),
                        "ephemeris_type": item.get("EPHEMERIS_TYPE"),
                        "classification_type": item.get("CLASSIFICATION_TYPE"),
                        "element_set_no": item.get("ELEMENT_SET_NO"),
                        "rev_at_epoch": item.get("REV_AT_EPOCH"),
                        "bstar": item.get("BSTAR"),
                        "mean_motion_dot": item.get("MEAN_MOTION_DOT"),
                        "mean_motion_ddot": item.get("MEAN_MOTION_DDOT"),
                        # Prefer original lines from the source, but keep a backend-built pair as a stable fallback.
                        "tle_line1": item.get("TLE_LINE1") or item.get("TLE1") or tle_line1,
                        "tle_line2": item.get("TLE_LINE2") or item.get("TLE2") or tle_line2,
                    },
                }
            )
        return transformed

    def _get_sample_data(self) -> List[Dict[str, Any]]:
        """Return fallback sample records in Space-Track's uppercase GP schema.

        Fix: the previous samples used lowercase keys ("name", "norad_cat_id",
        ...), which transform() cannot read — every sample came out as
        "Unknown" with empty metadata. Keys now match what transform() expects.
        """
        return [
            {
                "OBJECT_NAME": "ISS (ZARYA)",
                "NORAD_CAT_ID": 25544,
                "INTL_DESIGNATOR": "1998-067A",
                "EPOCH": "2026-03-13T00:00:00Z",
                "MEAN_MOTION": 15.49872723,
                "ECCENTRICITY": 0.0006292,
                "INCLINATION": 51.6400,
                "RAAN": 315.0000,
                "ARG_OF_PERIGEE": 100.0000,
                "MEAN_ANOMALY": 260.0000,
            },
            {
                "OBJECT_NAME": "STARLINK-1000",
                "NORAD_CAT_ID": 44720,
                "INTL_DESIGNATOR": "2019-029AZ",
                "EPOCH": "2026-03-13T00:00:00Z",
                "MEAN_MOTION": 15.79234567,
                "ECCENTRICITY": 0.0001234,
                "INCLINATION": 53.0000,
                "RAAN": 120.0000,
                "ARG_OF_PERIGEE": 90.0000,
                "MEAN_ANOMALY": 270.0000,
            },
        ]
|
||||
@@ -7,7 +7,7 @@ Uses Wayback Machine as backup data source since live data requires JavaScript r
|
||||
import json
|
||||
import re
|
||||
from typing import Dict, Any, List
|
||||
from datetime import datetime
|
||||
from datetime import UTC, datetime
|
||||
from bs4 import BeautifulSoup
|
||||
import httpx
|
||||
|
||||
@@ -103,7 +103,7 @@ class TeleGeographyCableCollector(BaseCollector):
|
||||
"capacity_tbps": item.get("capacity"),
|
||||
"url": item.get("url"),
|
||||
},
|
||||
"reference_date": datetime.utcnow().strftime("%Y-%m-%d"),
|
||||
"reference_date": datetime.now(UTC).strftime("%Y-%m-%d"),
|
||||
}
|
||||
result.append(entry)
|
||||
except (ValueError, TypeError, KeyError):
|
||||
@@ -131,7 +131,7 @@ class TeleGeographyCableCollector(BaseCollector):
|
||||
"owner": "Meta, Orange, Vodafone, etc.",
|
||||
"status": "active",
|
||||
},
|
||||
"reference_date": datetime.utcnow().strftime("%Y-%m-%d"),
|
||||
"reference_date": datetime.now(UTC).strftime("%Y-%m-%d"),
|
||||
},
|
||||
{
|
||||
"source_id": "telegeo_sample_2",
|
||||
@@ -147,7 +147,7 @@ class TeleGeographyCableCollector(BaseCollector):
|
||||
"owner": "Alibaba, NEC",
|
||||
"status": "planned",
|
||||
},
|
||||
"reference_date": datetime.utcnow().strftime("%Y-%m-%d"),
|
||||
"reference_date": datetime.now(UTC).strftime("%Y-%m-%d"),
|
||||
},
|
||||
]
|
||||
|
||||
@@ -187,7 +187,7 @@ class TeleGeographyLandingPointCollector(BaseCollector):
|
||||
"cable_count": len(item.get("cables", [])),
|
||||
"url": item.get("url"),
|
||||
},
|
||||
"reference_date": datetime.utcnow().strftime("%Y-%m-%d"),
|
||||
"reference_date": datetime.now(UTC).strftime("%Y-%m-%d"),
|
||||
}
|
||||
result.append(entry)
|
||||
except (ValueError, TypeError, KeyError):
|
||||
@@ -211,7 +211,7 @@ class TeleGeographyLandingPointCollector(BaseCollector):
|
||||
"value": "",
|
||||
"unit": "",
|
||||
"metadata": {"note": "Sample data"},
|
||||
"reference_date": datetime.utcnow().strftime("%Y-%m-%d"),
|
||||
"reference_date": datetime.now(UTC).strftime("%Y-%m-%d"),
|
||||
},
|
||||
]
|
||||
|
||||
@@ -258,7 +258,7 @@ class TeleGeographyCableSystemCollector(BaseCollector):
|
||||
"investment": item.get("investment"),
|
||||
"url": item.get("url"),
|
||||
},
|
||||
"reference_date": datetime.utcnow().strftime("%Y-%m-%d"),
|
||||
"reference_date": datetime.now(UTC).strftime("%Y-%m-%d"),
|
||||
}
|
||||
result.append(entry)
|
||||
except (ValueError, TypeError, KeyError):
|
||||
@@ -282,6 +282,6 @@ class TeleGeographyCableSystemCollector(BaseCollector):
|
||||
"value": "5000",
|
||||
"unit": "km",
|
||||
"metadata": {"note": "Sample data"},
|
||||
"reference_date": datetime.utcnow().strftime("%Y-%m-%d"),
|
||||
"reference_date": datetime.now(UTC).strftime("%Y-%m-%d"),
|
||||
},
|
||||
]
|
||||
|
||||
@@ -4,9 +4,9 @@ Collects data from TOP500 supercomputer rankings.
|
||||
https://top500.org/lists/top500/
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import re
|
||||
from typing import Dict, Any, List
|
||||
from datetime import datetime
|
||||
from bs4 import BeautifulSoup
|
||||
import httpx
|
||||
|
||||
@@ -21,14 +21,108 @@ class TOP500Collector(BaseCollector):
|
||||
data_type = "supercomputer"
|
||||
|
||||
async def fetch(self) -> List[Dict[str, Any]]:
|
||||
"""Fetch TOP500 data from website (scraping)"""
|
||||
# Get the latest list page
|
||||
"""Fetch TOP500 list data and enrich each row with detail-page metadata."""
|
||||
url = "https://top500.org/lists/top500/list/2025/11/"
|
||||
|
||||
async with httpx.AsyncClient(timeout=60.0) as client:
|
||||
async with httpx.AsyncClient(timeout=60.0, follow_redirects=True) as client:
|
||||
response = await client.get(url)
|
||||
response.raise_for_status()
|
||||
return self.parse_response(response.text)
|
||||
entries = self.parse_response(response.text)
|
||||
|
||||
semaphore = asyncio.Semaphore(8)
|
||||
|
||||
async def enrich(entry: Dict[str, Any]) -> Dict[str, Any]:
|
||||
detail_url = entry.pop("_detail_url", "")
|
||||
if not detail_url:
|
||||
return entry
|
||||
|
||||
async with semaphore:
|
||||
try:
|
||||
detail_response = await client.get(detail_url)
|
||||
detail_response.raise_for_status()
|
||||
entry["metadata"].update(self.parse_detail_response(detail_response.text))
|
||||
except Exception:
|
||||
entry["metadata"]["detail_fetch_failed"] = True
|
||||
return entry
|
||||
|
||||
return await asyncio.gather(*(enrich(entry) for entry in entries))
|
||||
|
||||
def _extract_system_fields(self, system_cell) -> Dict[str, str]:
|
||||
link = system_cell.find("a")
|
||||
system_name = link.get_text(" ", strip=True) if link else system_cell.get_text(" ", strip=True)
|
||||
detail_url = ""
|
||||
if link and link.get("href"):
|
||||
detail_url = f"https://top500.org{link.get('href')}"
|
||||
|
||||
manufacturer = ""
|
||||
if link and link.next_sibling:
|
||||
manufacturer = str(link.next_sibling).strip(" ,\n\t")
|
||||
|
||||
cell_text = system_cell.get_text("\n", strip=True)
|
||||
lines = [line.strip(" ,") for line in cell_text.splitlines() if line.strip()]
|
||||
|
||||
site = ""
|
||||
country = ""
|
||||
if lines:
|
||||
system_name = lines[0]
|
||||
if len(lines) >= 3:
|
||||
site = lines[-2]
|
||||
country = lines[-1]
|
||||
elif len(lines) == 2:
|
||||
country = lines[-1]
|
||||
|
||||
if not manufacturer and len(lines) >= 2:
|
||||
manufacturer = lines[1]
|
||||
|
||||
return {
|
||||
"name": system_name,
|
||||
"manufacturer": manufacturer,
|
||||
"site": site,
|
||||
"country": country,
|
||||
"detail_url": detail_url,
|
||||
}
|
||||
|
||||
def parse_detail_response(self, html: str) -> Dict[str, Any]:
|
||||
soup = BeautifulSoup(html, "html.parser")
|
||||
detail_table = soup.find("table", {"class": "table table-condensed"})
|
||||
if not detail_table:
|
||||
return {}
|
||||
|
||||
detail_map: Dict[str, Any] = {}
|
||||
label_aliases = {
|
||||
"Site": "site",
|
||||
"Manufacturer": "manufacturer",
|
||||
"Cores": "cores",
|
||||
"Processor": "processor",
|
||||
"Interconnect": "interconnect",
|
||||
"Installation Year": "installation_year",
|
||||
"Linpack Performance (Rmax)": "rmax",
|
||||
"Theoretical Peak (Rpeak)": "rpeak",
|
||||
"Nmax": "nmax",
|
||||
"HPCG": "hpcg",
|
||||
"Power": "power",
|
||||
"Power Measurement Level": "power_measurement_level",
|
||||
"Operating System": "operating_system",
|
||||
"Compiler": "compiler",
|
||||
"Math Library": "math_library",
|
||||
"MPI": "mpi",
|
||||
}
|
||||
|
||||
for row in detail_table.find_all("tr"):
|
||||
header = row.find("th")
|
||||
value_cell = row.find("td")
|
||||
if not header or not value_cell:
|
||||
continue
|
||||
|
||||
label = header.get_text(" ", strip=True).rstrip(":")
|
||||
key = label_aliases.get(label)
|
||||
if not key:
|
||||
continue
|
||||
|
||||
value = value_cell.get_text(" ", strip=True)
|
||||
detail_map[key] = value
|
||||
|
||||
return detail_map
|
||||
|
||||
def parse_response(self, html: str) -> List[Dict[str, Any]]:
|
||||
"""Parse TOP500 HTML response"""
|
||||
@@ -36,27 +130,26 @@ class TOP500Collector(BaseCollector):
|
||||
soup = BeautifulSoup(html, "html.parser")
|
||||
|
||||
# Find the table with TOP500 data
|
||||
table = soup.find("table", {"class": "top500-table"})
|
||||
if not table:
|
||||
# Try alternative table selector
|
||||
table = soup.find("table", {"id": "top500"})
|
||||
|
||||
if not table:
|
||||
# Try to find any table with rank data
|
||||
tables = soup.find_all("table")
|
||||
for t in tables:
|
||||
if t.find(string=re.compile(r"Rank.*System.*Cores.*Rmax", re.I)):
|
||||
table = t
|
||||
table = None
|
||||
for candidate in soup.find_all("table"):
|
||||
header_cells = [
|
||||
cell.get_text(" ", strip=True) for cell in candidate.select("thead th")
|
||||
]
|
||||
normalized_headers = [header.lower() for header in header_cells]
|
||||
if (
|
||||
"rank" in normalized_headers
|
||||
and "system" in normalized_headers
|
||||
and any("cores" in header for header in normalized_headers)
|
||||
and any("rmax" in header for header in normalized_headers)
|
||||
):
|
||||
table = candidate
|
||||
break
|
||||
|
||||
if not table:
|
||||
# Fallback: try to extract data from any table
|
||||
tables = soup.find_all("table")
|
||||
if tables:
|
||||
table = tables[0]
|
||||
table = soup.find("table", {"class": "top500-table"}) or soup.find("table", {"id": "top500"})
|
||||
|
||||
if table:
|
||||
rows = table.find_all("tr")
|
||||
rows = table.select("tr")
|
||||
for row in rows[1:]: # Skip header row
|
||||
cells = row.find_all(["td", "th"])
|
||||
if len(cells) >= 6:
|
||||
@@ -68,43 +161,26 @@ class TOP500Collector(BaseCollector):
|
||||
|
||||
rank = int(rank_text)
|
||||
|
||||
# System name (may contain link)
|
||||
system_cell = cells[1]
|
||||
system_name = system_cell.get_text(strip=True)
|
||||
# Try to get full name from link title or data attribute
|
||||
link = system_cell.find("a")
|
||||
if link and link.get("title"):
|
||||
system_name = link.get("title")
|
||||
system_fields = self._extract_system_fields(system_cell)
|
||||
system_name = system_fields["name"]
|
||||
manufacturer = system_fields["manufacturer"]
|
||||
site = system_fields["site"]
|
||||
country = system_fields["country"]
|
||||
detail_url = system_fields["detail_url"]
|
||||
|
||||
# Country
|
||||
country_cell = cells[2]
|
||||
country = country_cell.get_text(strip=True)
|
||||
# Try to get country from data attribute or image alt
|
||||
img = country_cell.find("img")
|
||||
if img and img.get("alt"):
|
||||
country = img.get("alt")
|
||||
|
||||
# Extract location (city)
|
||||
city = ""
|
||||
location_text = country_cell.get_text(strip=True)
|
||||
if "(" in location_text and ")" in location_text:
|
||||
city = location_text.split("(")[0].strip()
|
||||
cores = cells[2].get_text(strip=True).replace(",", "")
|
||||
|
||||
# Cores
|
||||
cores = cells[3].get_text(strip=True).replace(",", "")
|
||||
|
||||
# Rmax
|
||||
rmax_text = cells[4].get_text(strip=True)
|
||||
rmax_text = cells[3].get_text(strip=True)
|
||||
rmax = self._parse_performance(rmax_text)
|
||||
|
||||
# Rpeak
|
||||
rpeak_text = cells[5].get_text(strip=True)
|
||||
rpeak_text = cells[4].get_text(strip=True)
|
||||
rpeak = self._parse_performance(rpeak_text)
|
||||
|
||||
# Power (optional)
|
||||
power = ""
|
||||
if len(cells) >= 7:
|
||||
power = cells[6].get_text(strip=True)
|
||||
if len(cells) >= 6:
|
||||
power = cells[5].get_text(strip=True).replace(",", "")
|
||||
|
||||
entry = {
|
||||
"source_id": f"top500_{rank}",
|
||||
@@ -117,10 +193,14 @@ class TOP500Collector(BaseCollector):
|
||||
"unit": "PFlop/s",
|
||||
"metadata": {
|
||||
"rank": rank,
|
||||
"r_peak": rpeak,
|
||||
"power": power,
|
||||
"cores": cores,
|
||||
"rmax": rmax_text,
|
||||
"rpeak": rpeak_text,
|
||||
"power": power,
|
||||
"manufacturer": manufacturer,
|
||||
"site": site,
|
||||
},
|
||||
"_detail_url": detail_url,
|
||||
"reference_date": "2025-11-01",
|
||||
}
|
||||
data.append(entry)
|
||||
@@ -184,10 +264,15 @@ class TOP500Collector(BaseCollector):
|
||||
"unit": "PFlop/s",
|
||||
"metadata": {
|
||||
"rank": 1,
|
||||
"r_peak": 2746.38,
|
||||
"power": 29581,
|
||||
"cores": 11039616,
|
||||
"cores": "11039616",
|
||||
"rmax": "1742.00",
|
||||
"rpeak": "2746.38",
|
||||
"power": "29581",
|
||||
"manufacturer": "HPE",
|
||||
"site": "DOE/NNSA/LLNL",
|
||||
"processor": "AMD 4th Gen EPYC 24C 1.8GHz",
|
||||
"interconnect": "Slingshot-11",
|
||||
"installation_year": "2025",
|
||||
},
|
||||
"reference_date": "2025-11-01",
|
||||
},
|
||||
@@ -202,10 +287,12 @@ class TOP500Collector(BaseCollector):
|
||||
"unit": "PFlop/s",
|
||||
"metadata": {
|
||||
"rank": 2,
|
||||
"r_peak": 2055.72,
|
||||
"power": 24607,
|
||||
"cores": 9066176,
|
||||
"cores": "9066176",
|
||||
"rmax": "1353.00",
|
||||
"rpeak": "2055.72",
|
||||
"power": "24607",
|
||||
"manufacturer": "HPE",
|
||||
"site": "DOE/SC/Oak Ridge National Laboratory",
|
||||
},
|
||||
"reference_date": "2025-11-01",
|
||||
},
|
||||
@@ -220,9 +307,10 @@ class TOP500Collector(BaseCollector):
|
||||
"unit": "PFlop/s",
|
||||
"metadata": {
|
||||
"rank": 3,
|
||||
"r_peak": 1980.01,
|
||||
"power": 38698,
|
||||
"cores": 9264128,
|
||||
"cores": "9264128",
|
||||
"rmax": "1012.00",
|
||||
"rpeak": "1980.01",
|
||||
"power": "38698",
|
||||
"manufacturer": "Intel",
|
||||
},
|
||||
"reference_date": "2025-11-01",
|
||||
|
||||
@@ -1,15 +1,18 @@
|
||||
"""Task Scheduler for running collection jobs"""
|
||||
"""Task Scheduler for running collection jobs."""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Dict, Any
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from apscheduler.schedulers.asyncio import AsyncIOScheduler
|
||||
from apscheduler.triggers.interval import IntervalTrigger
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select
|
||||
|
||||
from app.db.session import async_session_factory
|
||||
from app.core.time import to_iso8601_utc
|
||||
from app.models.datasource import DataSource
|
||||
from app.models.task import CollectionTask
|
||||
from app.services.collectors.registry import collector_registry
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -17,134 +20,185 @@ logger = logging.getLogger(__name__)
|
||||
scheduler = AsyncIOScheduler()
|
||||
|
||||
|
||||
COLLECTOR_TO_ID = {
|
||||
"top500": 1,
|
||||
"epoch_ai_gpu": 2,
|
||||
"huggingface_models": 3,
|
||||
"huggingface_datasets": 4,
|
||||
"huggingface_spaces": 5,
|
||||
"peeringdb_ixp": 6,
|
||||
"peeringdb_network": 7,
|
||||
"peeringdb_facility": 8,
|
||||
"telegeography_cables": 9,
|
||||
"telegeography_landing": 10,
|
||||
"telegeography_systems": 11,
|
||||
"arcgis_cables": 15,
|
||||
"arcgis_landing_points": 16,
|
||||
"arcgis_cable_landing_relation": 17,
|
||||
"fao_landing_points": 18,
|
||||
}
|
||||
async def _update_next_run_at(datasource: DataSource, session) -> None:
|
||||
job = scheduler.get_job(datasource.source)
|
||||
datasource.next_run_at = job.next_run_time if job else None
|
||||
await session.commit()
|
||||
|
||||
|
||||
async def _apply_datasource_schedule(datasource: DataSource, session) -> None:
|
||||
collector = collector_registry.get(datasource.source)
|
||||
if not collector:
|
||||
logger.warning("Collector not found for datasource %s", datasource.source)
|
||||
return
|
||||
|
||||
collector_registry.set_active(datasource.source, datasource.is_active)
|
||||
|
||||
existing_job = scheduler.get_job(datasource.source)
|
||||
if existing_job:
|
||||
scheduler.remove_job(datasource.source)
|
||||
|
||||
if datasource.is_active:
|
||||
scheduler.add_job(
|
||||
run_collector_task,
|
||||
trigger=IntervalTrigger(minutes=max(1, datasource.frequency_minutes)),
|
||||
id=datasource.source,
|
||||
name=datasource.name,
|
||||
replace_existing=True,
|
||||
kwargs={"collector_name": datasource.source},
|
||||
)
|
||||
logger.info(
|
||||
"Scheduled collector: %s (every %sm)",
|
||||
datasource.source,
|
||||
datasource.frequency_minutes,
|
||||
)
|
||||
else:
|
||||
logger.info("Collector disabled: %s", datasource.source)
|
||||
|
||||
await _update_next_run_at(datasource, session)
|
||||
|
||||
|
||||
async def run_collector_task(collector_name: str):
|
||||
"""Run a single collector task"""
|
||||
"""Run a single collector task."""
|
||||
collector = collector_registry.get(collector_name)
|
||||
if not collector:
|
||||
logger.error(f"Collector not found: {collector_name}")
|
||||
logger.error("Collector not found: %s", collector_name)
|
||||
return
|
||||
|
||||
# Get the correct datasource_id
|
||||
datasource_id = COLLECTOR_TO_ID.get(collector_name, 1)
|
||||
async with async_session_factory() as db:
|
||||
result = await db.execute(select(DataSource).where(DataSource.source == collector_name))
|
||||
datasource = result.scalar_one_or_none()
|
||||
if not datasource:
|
||||
logger.error("Datasource not found for collector: %s", collector_name)
|
||||
return
|
||||
|
||||
if not datasource.is_active:
|
||||
logger.info("Skipping disabled collector: %s", collector_name)
|
||||
return
|
||||
|
||||
try:
|
||||
collector._datasource_id = datasource.id
|
||||
logger.info("Running collector: %s (datasource_id=%s)", collector_name, datasource.id)
|
||||
task_result = await collector.run(db)
|
||||
datasource.last_run_at = datetime.now(UTC)
|
||||
datasource.last_status = task_result.get("status")
|
||||
await _update_next_run_at(datasource, db)
|
||||
logger.info("Collector %s completed: %s", collector_name, task_result)
|
||||
except Exception as exc:
|
||||
datasource.last_run_at = datetime.now(UTC)
|
||||
datasource.last_status = "failed"
|
||||
await db.commit()
|
||||
logger.exception("Collector %s failed: %s", collector_name, exc)
|
||||
|
||||
|
||||
async def cleanup_stale_running_tasks(max_age_hours: int = 2) -> int:
|
||||
"""Mark stale running tasks as failed after restarts or collector hangs."""
|
||||
cutoff = datetime.now(UTC) - timedelta(hours=max_age_hours)
|
||||
|
||||
async with async_session_factory() as db:
|
||||
try:
|
||||
# Set the datasource_id on the collector instance
|
||||
collector._datasource_id = datasource_id
|
||||
|
||||
logger.info(f"Running collector: {collector_name} (datasource_id={datasource_id})")
|
||||
result = await collector.run(db)
|
||||
logger.info(f"Collector {collector_name} completed: {result}")
|
||||
except Exception as e:
|
||||
logger.error(f"Collector {collector_name} failed: {e}")
|
||||
|
||||
|
||||
def start_scheduler():
|
||||
"""Start the scheduler with all registered collectors"""
|
||||
collectors = collector_registry.all()
|
||||
|
||||
for name, collector in collectors.items():
|
||||
if collector_registry.is_active(name):
|
||||
scheduler.add_job(
|
||||
run_collector_task,
|
||||
trigger=IntervalTrigger(hours=collector.frequency_hours),
|
||||
id=name,
|
||||
name=name,
|
||||
replace_existing=True,
|
||||
kwargs={"collector_name": name},
|
||||
result = await db.execute(
|
||||
select(CollectionTask).where(
|
||||
CollectionTask.status == "running",
|
||||
CollectionTask.started_at.is_not(None),
|
||||
CollectionTask.started_at < cutoff,
|
||||
)
|
||||
logger.info(f"Scheduled collector: {name} (every {collector.frequency_hours}h)")
|
||||
)
|
||||
stale_tasks = result.scalars().all()
|
||||
|
||||
for task in stale_tasks:
|
||||
task.status = "failed"
|
||||
task.phase = "failed"
|
||||
task.completed_at = datetime.now(UTC)
|
||||
existing_error = (task.error_message or "").strip()
|
||||
cleanup_error = "Marked failed automatically after stale running task cleanup"
|
||||
task.error_message = f"{existing_error}\n{cleanup_error}".strip() if existing_error else cleanup_error
|
||||
|
||||
if stale_tasks:
|
||||
await db.commit()
|
||||
logger.warning("Cleaned up %s stale running collection task(s)", len(stale_tasks))
|
||||
|
||||
return len(stale_tasks)
|
||||
|
||||
|
||||
def start_scheduler() -> None:
|
||||
"""Start the scheduler."""
|
||||
if not scheduler.running:
|
||||
scheduler.start()
|
||||
logger.info("Scheduler started")
|
||||
|
||||
|
||||
def stop_scheduler():
|
||||
"""Stop the scheduler"""
|
||||
scheduler.shutdown()
|
||||
def stop_scheduler() -> None:
|
||||
"""Stop the scheduler."""
|
||||
if scheduler.running:
|
||||
scheduler.shutdown(wait=False)
|
||||
logger.info("Scheduler stopped")
|
||||
|
||||
|
||||
async def sync_scheduler_with_datasources() -> None:
|
||||
"""Synchronize scheduler jobs with datasource table."""
|
||||
async with async_session_factory() as db:
|
||||
result = await db.execute(select(DataSource).order_by(DataSource.id))
|
||||
datasources = result.scalars().all()
|
||||
|
||||
configured_sources = {datasource.source for datasource in datasources}
|
||||
for job in list(scheduler.get_jobs()):
|
||||
if job.id not in configured_sources:
|
||||
scheduler.remove_job(job.id)
|
||||
|
||||
for datasource in datasources:
|
||||
await _apply_datasource_schedule(datasource, db)
|
||||
|
||||
|
||||
async def sync_datasource_job(datasource_id: int) -> bool:
|
||||
"""Synchronize a single datasource job after settings changes."""
|
||||
async with async_session_factory() as db:
|
||||
datasource = await db.get(DataSource, datasource_id)
|
||||
if not datasource:
|
||||
return False
|
||||
|
||||
await _apply_datasource_schedule(datasource, db)
|
||||
return True
|
||||
|
||||
|
||||
def get_scheduler_jobs() -> list[Dict[str, Any]]:
|
||||
"""Get all scheduled jobs"""
|
||||
"""Get all scheduled jobs."""
|
||||
jobs = []
|
||||
for job in scheduler.get_jobs():
|
||||
jobs.append(
|
||||
{
|
||||
"id": job.id,
|
||||
"name": job.name,
|
||||
"next_run_time": job.next_run_time.isoformat() if job.next_run_time else None,
|
||||
"next_run_time": to_iso8601_utc(job.next_run_time),
|
||||
"trigger": str(job.trigger),
|
||||
}
|
||||
)
|
||||
return jobs
|
||||
|
||||
|
||||
def add_job(collector_name: str, hours: int = 4):
|
||||
"""Add a new scheduled job"""
|
||||
collector = collector_registry.get(collector_name)
|
||||
if not collector:
|
||||
raise ValueError(f"Collector not found: {collector_name}")
|
||||
async def get_latest_task_id_for_datasource(datasource_id: int) -> Optional[int]:
|
||||
from app.models.task import CollectionTask
|
||||
|
||||
scheduler.add_job(
|
||||
run_collector_task,
|
||||
trigger=IntervalTrigger(hours=hours),
|
||||
id=collector_name,
|
||||
name=collector_name,
|
||||
replace_existing=True,
|
||||
kwargs={"collector_name": collector_name},
|
||||
async with async_session_factory() as db:
|
||||
result = await db.execute(
|
||||
select(CollectionTask.id)
|
||||
.where(CollectionTask.datasource_id == datasource_id)
|
||||
.order_by(CollectionTask.created_at.desc(), CollectionTask.id.desc())
|
||||
.limit(1)
|
||||
)
|
||||
logger.info(f"Added scheduled job: {collector_name} (every {hours}h)")
|
||||
|
||||
|
||||
def remove_job(collector_name: str):
|
||||
"""Remove a scheduled job"""
|
||||
scheduler.remove_job(collector_name)
|
||||
logger.info(f"Removed scheduled job: {collector_name}")
|
||||
|
||||
|
||||
def pause_job(collector_name: str):
|
||||
"""Pause a scheduled job"""
|
||||
scheduler.pause_job(collector_name)
|
||||
logger.info(f"Paused job: {collector_name}")
|
||||
|
||||
|
||||
def resume_job(collector_name: str):
|
||||
"""Resume a scheduled job"""
|
||||
scheduler.resume_job(collector_name)
|
||||
logger.info(f"Resumed job: {collector_name}")
|
||||
return result.scalar_one_or_none()
|
||||
|
||||
|
||||
def run_collector_now(collector_name: str) -> bool:
|
||||
"""Run a collector immediately (not scheduled)"""
|
||||
"""Run a collector immediately (not scheduled)."""
|
||||
collector = collector_registry.get(collector_name)
|
||||
if not collector:
|
||||
logger.error(f"Collector not found: {collector_name}")
|
||||
logger.error("Collector not found: %s", collector_name)
|
||||
return False
|
||||
|
||||
try:
|
||||
asyncio.create_task(run_collector_task(collector_name))
|
||||
logger.info(f"Triggered collector: {collector_name}")
|
||||
logger.info("Triggered collector: %s", collector_name)
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to trigger collector {collector_name}: {e}")
|
||||
except Exception as exc:
|
||||
logger.error("Failed to trigger collector %s: %s", collector_name, exc)
|
||||
return False
|
||||
|
||||
@@ -16,3 +16,4 @@ email-validator
|
||||
apscheduler>=3.10.4
|
||||
pytest>=7.4.0
|
||||
pytest-asyncio>=0.23.0
|
||||
networkx>=3.0
|
||||
|
||||
74
backend/tests/test_bgp.py
Normal file
@@ -0,0 +1,74 @@
|
||||
"""Tests for BGP observability helpers."""
|
||||
|
||||
from app.models.bgp_anomaly import BGPAnomaly
|
||||
from app.services.collectors.bgp_common import normalize_bgp_event
|
||||
from app.services.collectors.bgpstream import BGPStreamBackfillCollector
|
||||
|
||||
|
||||
def test_normalize_bgp_event_from_live_payload():
|
||||
event = normalize_bgp_event(
|
||||
{
|
||||
"collector": "rrc00",
|
||||
"peer_asn": "3333",
|
||||
"peer_ip": "2001:db8::1",
|
||||
"type": "UPDATE",
|
||||
"event_type": "announcement",
|
||||
"prefix": "203.0.113.0/24",
|
||||
"path": ["3333", "64500", "64496"],
|
||||
"communities": ["3333:100"],
|
||||
"timestamp": "2026-03-26T08:00:00Z",
|
||||
},
|
||||
project="ris-live",
|
||||
)
|
||||
|
||||
assert event["name"] == "203.0.113.0/24"
|
||||
assert event["metadata"]["collector"] == "rrc00"
|
||||
assert event["metadata"]["peer_asn"] == 3333
|
||||
assert event["metadata"]["origin_asn"] == 64496
|
||||
assert event["metadata"]["as_path_length"] == 3
|
||||
assert event["metadata"]["prefix_length"] == 24
|
||||
assert event["metadata"]["is_more_specific"] is False
|
||||
|
||||
|
||||
def test_bgpstream_transform_preserves_broker_record():
|
||||
collector = BGPStreamBackfillCollector()
|
||||
transformed = collector.transform(
|
||||
[
|
||||
{
|
||||
"project": "routeviews",
|
||||
"collector": "route-views.sg",
|
||||
"filename": "rib.20260326.0800.gz",
|
||||
"startTime": "2026-03-26T08:00:00Z",
|
||||
"prefix": "198.51.100.0/24",
|
||||
"origin_asn": 64512,
|
||||
}
|
||||
]
|
||||
)
|
||||
|
||||
assert len(transformed) == 1
|
||||
record = transformed[0]
|
||||
assert record["name"] == "rib.20260326.0800.gz"
|
||||
assert record["metadata"]["project"] == "bgpstream"
|
||||
assert record["metadata"]["broker_record"]["filename"] == "rib.20260326.0800.gz"
|
||||
|
||||
|
||||
def test_bgp_anomaly_to_dict():
|
||||
anomaly = BGPAnomaly(
|
||||
source="ris_live_bgp",
|
||||
anomaly_type="origin_change",
|
||||
severity="critical",
|
||||
status="active",
|
||||
entity_key="origin_change:203.0.113.0/24:64497",
|
||||
prefix="203.0.113.0/24",
|
||||
origin_asn=64496,
|
||||
new_origin_asn=64497,
|
||||
summary="Origin ASN changed",
|
||||
confidence=0.9,
|
||||
evidence={"previous_origins": [64496], "current_origins": [64497]},
|
||||
)
|
||||
|
||||
data = anomaly.to_dict()
|
||||
assert data["source"] == "ris_live_bgp"
|
||||
assert data["anomaly_type"] == "origin_change"
|
||||
assert data["new_origin_asn"] == 64497
|
||||
assert data["evidence"]["previous_origins"] == [64496]
|
||||
@@ -31,45 +31,6 @@ services:
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
backend:
|
||||
build:
|
||||
context: ./backend
|
||||
dockerfile: Dockerfile
|
||||
container_name: planet_backend
|
||||
ports:
|
||||
- "8000:8000"
|
||||
environment:
|
||||
- DATABASE_URL=postgresql+asyncpg://postgres:postgres@postgres:5432/planet_db
|
||||
- REDIS_URL=redis://redis:6379/0
|
||||
- SECRET_KEY=your-secret-key-change-in-production
|
||||
- CORS_ORIGINS=["http://localhost:3000","http://0.0.0.0:3000","http://frontend:3000"]
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
redis:
|
||||
condition: service_healthy
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
|
||||
frontend:
|
||||
build:
|
||||
context: ./frontend
|
||||
dockerfile: Dockerfile
|
||||
container_name: planet_frontend
|
||||
ports:
|
||||
- "3000:3000"
|
||||
environment:
|
||||
- VITE_API_URL=http://backend:8000/api/v1
|
||||
- VITE_WS_URL=ws://backend:8000/ws
|
||||
depends_on:
|
||||
backend:
|
||||
condition: service_healthy
|
||||
stdin_open: true
|
||||
tty: true
|
||||
|
||||
volumes:
|
||||
postgres_data:
|
||||
redis_data:
|
||||
|
||||
255
docs/CHANGELOG.md
Normal file
@@ -0,0 +1,255 @@
|
||||
# Changelog
|
||||
|
||||
All notable changes to `planet` are documented here.
|
||||
|
||||
This project follows the repository versioning rule:
|
||||
|
||||
- `feature` -> `+0.1.0`
|
||||
- `bugfix` -> `+0.0.1`
|
||||
|
||||
## 0.21.7
|
||||
|
||||
Released: 2026-03-27
|
||||
|
||||
### Highlights
|
||||
|
||||
- Added Earth-side BGP collector visualization support so anomaly markers and collector stations can be explored together.
|
||||
- Refined the collected-data distribution treemap so square tiles better reflect relative volume while staying readable in dense layouts.
|
||||
|
||||
### Added
|
||||
|
||||
- Added `/api/v1/visualization/geo/bgp-collectors` to expose RIPE RIS collector locations as GeoJSON.
|
||||
- Added dedicated Earth collector marker handling and BGP collector detail cards in the Earth runtime.
|
||||
- Added collector-specific BGP visual tuning for altitude, opacity, scale, and pulse behavior.
|
||||
|
||||
### Improved
|
||||
|
||||
- Improved the collected-data distribution treemap with dynamic square-grid sizing, clearer area-based span rules, centered compact tiles, and tooltip coverage on both icons and labels.
|
||||
- Improved compact treemap readability by hiding `1x1` labels, reducing `1x1` value font size, and centering icon/value content.
|
||||
- Improved Earth BGP interactions so anomaly markers and collector markers can both participate in hover, lock, legend, and info-card flows.
|
||||
|
||||
### Fixed
|
||||
|
||||
- Fixed Earth BGP data loading gaps by adding the missing `bgp.js` runtime module required by the current control and visualization flow.
|
||||
- Fixed treemap layout drift where compact tiles could appear oversized or visually inconsistent with the intended square-grid distribution.
|
||||
|
||||
## 0.21.6
|
||||
|
||||
Released: 2026-03-27
|
||||
|
||||
### Highlights
|
||||
|
||||
- Refined the Earth page interaction loop with object-driven legend switching, clearer selection feedback, and cleaner HUD copy/layout behavior.
|
||||
- Improved the Earth info surfaces so status toasts, info-card interactions, and title/subtitle presentation feel more intentional and easier to scan.
|
||||
|
||||
### Added
|
||||
|
||||
- Added click-to-copy support for info-card labels so clicking a field label copies the matching field value.
|
||||
- Added runtime-generated legend content for cables and satellites based on current Earth data and selection state.
|
||||
|
||||
### Improved
|
||||
|
||||
- Improved Earth legend behavior so selected cables and selected satellite categories are promoted to the top of the legend list.
|
||||
- Improved legend overflow handling by constraining the visible list and using scroll for additional entries.
|
||||
- Improved info-panel heading layout with centered title/subtitle styling and better subtitle hierarchy.
|
||||
- Improved status-message behavior with replayable slide-in notifications when messages change in quick succession.
|
||||
|
||||
### Fixed
|
||||
|
||||
- Fixed info-card content spacing by targeting the actual `#info-card-content` node instead of a non-matching class selector.
|
||||
- Fixed cable legend generation so it follows backend-returned cable names and colors instead of stale hard-coded placeholder categories.
|
||||
- Fixed reset-view and legend-related HUD behaviors so selection and legend state stay in sync when users interact with real Earth objects.
|
||||
|
||||
## 0.21.5
|
||||
|
||||
Released: 2026-03-27
|
||||
|
||||
### Highlights
|
||||
|
||||
- Reworked the collected-data overview into a clearer split between KPI cards and a switchable treemap distribution.
|
||||
- Added a direct Earth entry on the dashboard and tightened several admin-side scrolling/layout behaviors.
|
||||
|
||||
### Added
|
||||
|
||||
- Added a dashboard quick-access card linking directly to `/earth`.
|
||||
- Added collected-data treemap switching between `按数据源` and `按类型`.
|
||||
- Added data-type-specific icons for the collected-data overview treemap.
|
||||
|
||||
### Improved
|
||||
|
||||
- Improved collected-data summary behavior so overview counts follow the active filters and search state.
|
||||
- Improved the collected-data treemap with square tiles, a wider default overview panel width, and narrower overview scrollbars.
|
||||
- Improved responsive behavior near the tablet breakpoint so the collected-data page can scroll instead of clipping the overview or crushing the table.
|
||||
|
||||
### Fixed
|
||||
|
||||
- Fixed the user-management table overflow issue by restoring `ant-table-body` to auto height for that page so the outer container no longer incorrectly takes over vertical scrolling.
|
||||
- Fixed overly wide scrollbar presentation in collected-data and related admin surfaces by aligning them with the slimmer in-app scrollbar style.
|
||||
|
||||
## 0.21.3
|
||||
|
||||
Released: 2026-03-27
|
||||
|
||||
### Highlights
|
||||
|
||||
- Upgraded the startup script into a more resilient local control entrypoint with retry-based service boot, selective restart targeting, and guided CLI user creation.
|
||||
- Reduced friction when developing across slower machines by making backend and frontend startup checks more tolerant and operator-friendly.
|
||||
|
||||
### Added
|
||||
|
||||
- Added interactive `createuser` support to [planet.sh](/home/ray/dev/linkong/planet/planet.sh) for CLI-driven username, email, password, and admin-role creation.
|
||||
|
||||
### Improved
|
||||
|
||||
- Improved `start` and `restart` in [planet.sh](/home/ray/dev/linkong/planet/planet.sh) with optional backend/frontend port targeting and on-demand port cleanup.
|
||||
- Improved startup robustness with repeated health checks and automatic retry loops for both backend and frontend services.
|
||||
- Improved restart ergonomics so `restart -b` and `restart -f` can restart only the requested service instead of forcing a full stack restart.
|
||||
|
||||
### Fixed
|
||||
|
||||
- Fixed false startup failures on slower environments where services needed longer than a single fixed wait window to become healthy.
|
||||
- Fixed first-run login dead-end by ensuring a default admin user is created during backend initialization when the database has no users.
|
||||
|
||||
## 0.21.2
|
||||
|
||||
Released: 2026-03-26
|
||||
|
||||
### Highlights
|
||||
|
||||
- Reworked the Earth page HUD into a bottom-centered floating toolbar with grouped popovers and richer interaction feedback.
|
||||
- Unified toolbar and corner cards under a liquid-glass visual language and refined status toasts, object info cards, and legend behavior.
|
||||
- Made the legend state reflect the currently selected Earth object instead of a fixed static list.
|
||||
|
||||
### Added
|
||||
|
||||
- Added a reusable Earth legend module in [legend.js](/home/ray/dev/linkong/planet/frontend/public/earth/js/legend.js).
|
||||
- Added Material Symbols-based Earth toolbar icons and dedicated fullscreen-collapse icon support.
|
||||
- Added click-to-copy support for info-card field labels.
|
||||
|
||||
### Improved
|
||||
|
||||
- Improved Earth toolbar layout with centered floating controls, popover-based display toggles, and zoom controls.
|
||||
- Improved Earth HUD visuals with liquid-glass styling for buttons, info cards, panels, and animated status messages.
|
||||
- Improved info-card spacing, scrollbar styling, and object detail readability.
|
||||
- Improved legend rendering so cable and satellite object selection can drive the displayed legend content.
|
||||
|
||||
### Fixed
|
||||
|
||||
- Fixed tooltip coverage and splash copy mismatches in the Earth page controls.
|
||||
- Fixed several toolbar icon clarity, centering, and state-toggle issues.
|
||||
- Fixed status-message behavior so repeated notifications replay the slide-in animation.
|
||||
|
||||
## 0.20.0
|
||||
|
||||
Released: 2026-03-26
|
||||
|
||||
### Highlights
|
||||
|
||||
- Stabilized the Earth big-screen module for longer-running sessions.
|
||||
- Fixed satellite orbit generation by correcting TLE handling end to end.
|
||||
- Added a reusable backend TLE helper and exposed `tle_line1 / tle_line2` to the Earth frontend.
|
||||
|
||||
### Added
|
||||
|
||||
- Added a dedicated Earth module remediation plan in [earth-module-plan.md](/home/ray/dev/linkong/planet/docs/earth-module-plan.md).
|
||||
- Added backend TLE helpers in [satellite_tle.py](/home/ray/dev/linkong/planet/backend/app/core/satellite_tle.py).
|
||||
- Added backend support for returning `tle_line1` and `tle_line2` from the satellite visualization API.
|
||||
|
||||
### Improved
|
||||
|
||||
- Reworked the Earth module lifecycle with cleaner init, reload, and destroy paths.
|
||||
- Improved scene cleanup for cables, landing points, satellite markers, and related runtime state.
|
||||
- Reduced Earth interaction overhead by reusing hot-path math and pointer objects.
|
||||
- Switched satellite animation timing to real delta-based updates for more stable motion.
|
||||
- Reduced fragile global-state coupling inside the legacy Earth runtime.
|
||||
|
||||
### Fixed
|
||||
|
||||
- Fixed white-screen risk caused by iframe cleanup behavior in development mode.
|
||||
- Fixed incorrect client-side TLE generation:
|
||||
  - corrected line 2 field ordering
|
||||
  - corrected eccentricity formatting
|
||||
  - added checksum generation
|
||||
- Fixed fallback orbit issues affecting some Starlink satellites such as `STARLINK-36158`.
|
||||
- Fixed partial Earth data load failures so one failed source is less likely to break the whole view.
|
||||
|
||||
### Notes
|
||||
|
||||
- The Earth frontend now prefers backend-provided raw TLE lines.
|
||||
- Older satellite records can still fall back to backend-generated TLE lines when raw lines are unavailable.
|
||||
- This release is primarily focused on Earth module stability rather than visible admin UI changes.
|
||||
|
||||
## 0.21.1
|
||||
|
||||
Released: 2026-03-26
|
||||
|
||||
### Highlights
|
||||
|
||||
- Refined the Earth big-screen toolbar with clearer controls, hover hints, and more consistent visual language.
|
||||
- Replaced emoji-based Earth toolbar controls with SVG icons for a cleaner HUD.
|
||||
- Updated the Earth loading splash so manual reloads no longer show legacy wording.
|
||||
|
||||
### Improved
|
||||
|
||||
- Improved zoom controls by adding tooltips for reset view, zoom in, zoom out, and resetting zoom to `100%`.
|
||||
- Improved Earth toolbar readability with larger icons and revised glyphs for rotation, reload, satellites, trails, cables, terrain, and collapse.
|
||||
- Improved loading overlay copy to better distinguish initial initialization from manual refresh.
|
||||
|
||||
### Fixed
|
||||
|
||||
- Fixed rotate toggle rendering so play/pause state no longer relies on emoji text replacement.
|
||||
- Fixed Earth autorotation target syncing so inertial drag is preserved while the globe is still coasting.
|
||||
|
||||
## 0.21.0
|
||||
|
||||
Released: 2026-03-26
|
||||
|
||||
### Highlights
|
||||
|
||||
- Added legacy-inspired inertial drag behavior to the Earth big-screen module.
|
||||
- Removed the hard 10,000-satellite ceiling when Earth satellite loading is configured as unlimited.
|
||||
- Tightened Earth toolbar and hover-state synchronization for a more consistent runtime feel.
|
||||
|
||||
### Added
|
||||
|
||||
- Added inertial drag state and smoothing to the Earth runtime so drag release now decays naturally.
|
||||
|
||||
### Improved
|
||||
|
||||
- Improved drag handling so moving the pointer outside the canvas no longer prematurely stops rotation.
|
||||
- Improved satellite loading to support dynamic frontend buffer sizing when no explicit limit is set.
|
||||
- Improved Earth interaction fidelity by keeping the hover ring synchronized with moving satellites.
|
||||
|
||||
### Fixed
|
||||
|
||||
- Fixed the trails toolbar button so its default visual state matches the actual default runtime state.
|
||||
- Fixed the satellite GeoJSON endpoint so omitting `limit` no longer silently falls back to `10000`.
|
||||
- Fixed hover ring lag where the ring could stay behind the satellite until the next mouse move.
|
||||
|
||||
## 0.19.0
|
||||
|
||||
Released: 2026-03-25
|
||||
|
||||
### Highlights
|
||||
|
||||
- Refined data collection storage and history handling.
|
||||
- Moved collected data away from several strongly coupled legacy columns.
|
||||
- Improved data list filtering, metadata-driven detail rendering, and collection workflows.
|
||||
|
||||
### Added
|
||||
|
||||
- Added collected data history planning docs.
|
||||
- Added metadata backfill and removal-readiness scripts.
|
||||
- Added version history tracking.
|
||||
|
||||
### Improved
|
||||
|
||||
- Improved datasource task tracking and collection status flow.
|
||||
- Improved collected data search, filtering, and metadata rendering.
|
||||
- Improved configuration center layout consistency across admin pages.
|
||||
|
||||
### Fixed
|
||||
|
||||
- Fixed several collected-data field mapping issues.
|
||||
- Fixed frontend table layout inconsistencies across multiple admin pages.
|
||||
- Fixed TOP500 parsing and related metadata alignment issues.
|
||||
487
docs/bgp-observability-plan.md
Normal file
@@ -0,0 +1,487 @@
|
||||
# BGP Observability Plan
|
||||
|
||||
## Goal
|
||||
|
||||
Build a global routing observability capability on top of:
|
||||
|
||||
- [RIPE RIS Live](https://ris-live.ripe.net/)
|
||||
- [CAIDA BGPStream data access overview](https://bgpstream.caida.org/docs/overview/data-access)
|
||||
|
||||
The target is to support:
|
||||
|
||||
- real-time routing event ingestion
|
||||
- historical replay and baseline analysis
|
||||
- anomaly detection
|
||||
- Earth big-screen visualization
|
||||
|
||||
## Important Scope Note
|
||||
|
||||
These data sources expose the BGP control plane, not user traffic itself.
|
||||
|
||||
That means the system can infer:
|
||||
|
||||
- route propagation direction
|
||||
- prefix reachability changes
|
||||
- AS path changes
|
||||
- visibility changes across collectors
|
||||
|
||||
But it cannot directly measure:
|
||||
|
||||
- exact application traffic volume
|
||||
- exact user packet path
|
||||
- real bandwidth consumption between countries or operators
|
||||
|
||||
Product wording should therefore use phrases like:
|
||||
|
||||
- global routing propagation
|
||||
- route visibility
|
||||
- control-plane anomalies
|
||||
- suspected path diversion
|
||||
|
||||
Instead of claiming direct traffic measurement.
|
||||
|
||||
## Data Source Roles
|
||||
|
||||
### RIS Live
|
||||
|
||||
Use RIS Live as the real-time feed.
|
||||
|
||||
Recommended usage:
|
||||
|
||||
- subscribe to update streams over WebSocket
|
||||
- ingest announcements and withdrawals continuously
|
||||
- trigger low-latency alerts
|
||||
|
||||
Best suited for:
|
||||
|
||||
- hijack suspicion
|
||||
- withdrawal bursts
|
||||
- real-time path changes
|
||||
- live Earth event overlay
|
||||
|
||||
### BGPStream
|
||||
|
||||
Use BGPStream as the historical and replay layer.
|
||||
|
||||
Recommended usage:
|
||||
|
||||
- backfill time windows
|
||||
- build normal baselines
|
||||
- compare current events against history
|
||||
- support investigations and playback
|
||||
|
||||
Best suited for:
|
||||
|
||||
- historical anomaly confirmation
|
||||
- baseline path frequency
|
||||
- visibility baselines
|
||||
- postmortem analysis
|
||||
|
||||
## Recommended Architecture
|
||||
|
||||
```mermaid
|
||||
flowchart LR
|
||||
A["RIS Live WebSocket"] --> B["Realtime Collector"]
|
||||
C["BGPStream Historical Access"] --> D["Backfill Collector"]
|
||||
B --> E["Normalization Layer"]
|
||||
D --> E
|
||||
E --> F["data_snapshots"]
|
||||
E --> G["collected_data"]
|
||||
E --> H["bgp_anomalies"]
|
||||
H --> I["Alerts API"]
|
||||
G --> J["Visualization API"]
|
||||
H --> J
|
||||
J --> K["Earth Big Screen"]
|
||||
```
|
||||
|
||||
## Storage Design
|
||||
|
||||
The current project already has:
|
||||
|
||||
- [data_snapshot.py](/home/ray/dev/linkong/planet/backend/app/models/data_snapshot.py)
|
||||
- [collected_data.py](/home/ray/dev/linkong/planet/backend/app/models/collected_data.py)
|
||||
|
||||
So the lowest-risk path is:
|
||||
|
||||
1. keep raw and normalized BGP events in `collected_data`
|
||||
2. use `data_snapshots` to group each ingest window
|
||||
3. add a dedicated anomaly table for higher-value derived events
|
||||
|
||||
## Proposed Data Types
|
||||
|
||||
### `collected_data`
|
||||
|
||||
Use these `source` values:
|
||||
|
||||
- `ris_live_bgp`
|
||||
- `bgpstream_bgp`
|
||||
|
||||
Use these `data_type` values:
|
||||
|
||||
- `bgp_update`
|
||||
- `bgp_rib`
|
||||
- `bgp_visibility`
|
||||
- `bgp_path_change`
|
||||
|
||||
Recommended stable fields:
|
||||
|
||||
- `source`
|
||||
- `source_id`
|
||||
- `entity_key`
|
||||
- `data_type`
|
||||
- `name`
|
||||
- `reference_date`
|
||||
- `metadata`
|
||||
|
||||
Recommended `entity_key` strategy:
|
||||
|
||||
- event entity: `collector|peer|prefix|event_time`
|
||||
- prefix state entity: `collector|peer|prefix`
|
||||
- origin state entity: `prefix|origin_asn`
|
||||
|
||||
### `metadata` schema for raw events
|
||||
|
||||
Store the normalized event payload in `metadata`:
|
||||
|
||||
```json
|
||||
{
|
||||
"project": "ris-live",
|
||||
"collector": "rrc00",
|
||||
"peer_asn": 3333,
|
||||
"peer_ip": "2001:db8::1",
|
||||
"event_type": "announcement",
|
||||
"prefix": "203.0.113.0/24",
|
||||
"origin_asn": 64496,
|
||||
"as_path": [3333, 64500, 64496],
|
||||
"communities": ["3333:100", "64500:1"],
|
||||
"next_hop": "192.0.2.1",
|
||||
"med": 0,
|
||||
"local_pref": null,
|
||||
"timestamp": "2026-03-26T08:00:00Z",
|
||||
"raw_message": {}
|
||||
}
|
||||
```
|
||||
|
||||
### New anomaly table
|
||||
|
||||
Add a new table, recommended name: `bgp_anomalies`
|
||||
|
||||
Suggested columns:
|
||||
|
||||
- `id`
|
||||
- `snapshot_id`
|
||||
- `task_id`
|
||||
- `source`
|
||||
- `anomaly_type`
|
||||
- `severity`
|
||||
- `status`
|
||||
- `entity_key`
|
||||
- `prefix`
|
||||
- `origin_asn`
|
||||
- `new_origin_asn`
|
||||
- `peer_scope`
|
||||
- `started_at`
|
||||
- `ended_at`
|
||||
- `confidence`
|
||||
- `summary`
|
||||
- `evidence`
|
||||
- `created_at`
|
||||
|
||||
This table should represent derived intelligence, not raw updates.
|
||||
|
||||
## Collector Design
|
||||
|
||||
### 1. `RISLiveCollector`
|
||||
|
||||
Responsibility:
|
||||
|
||||
- maintain WebSocket connection
|
||||
- subscribe to relevant message types
|
||||
- normalize messages
|
||||
- write event batches into snapshots
|
||||
- optionally emit derived anomalies in near real time
|
||||
|
||||
Suggested runtime mode:
|
||||
|
||||
- long-running background task
|
||||
|
||||
Suggested snapshot strategy:
|
||||
|
||||
- one snapshot per rolling time window
|
||||
- for example every 1 minute or every 5 minutes
|
||||
|
||||
### 2. `BGPStreamBackfillCollector`
|
||||
|
||||
Responsibility:
|
||||
|
||||
- fetch historical data windows
|
||||
- normalize to the same schema as real-time data
|
||||
- build baselines
|
||||
- re-run anomaly rules on past windows if needed
|
||||
|
||||
Suggested runtime mode:
|
||||
|
||||
- scheduled task
|
||||
- or ad hoc task for investigations
|
||||
|
||||
Suggested snapshot strategy:
|
||||
|
||||
- one snapshot per historical query window
|
||||
|
||||
## Normalization Rules
|
||||
|
||||
Normalize both sources into the same internal event model.
|
||||
|
||||
Required normalized fields:
|
||||
|
||||
- `collector`
|
||||
- `peer_asn`
|
||||
- `peer_ip`
|
||||
- `event_type`
|
||||
- `prefix`
|
||||
- `origin_asn`
|
||||
- `as_path`
|
||||
- `timestamp`
|
||||
|
||||
Derived normalized fields:
|
||||
|
||||
- `as_path_length`
|
||||
- `country_guess`
|
||||
- `prefix_length`
|
||||
- `is_more_specific`
|
||||
- `visibility_weight`
|
||||
|
||||
## Anomaly Detection Rules
|
||||
|
||||
Start with these five rules first.
|
||||
|
||||
### 1. Origin ASN Change
|
||||
|
||||
Trigger when:
|
||||
|
||||
- the same prefix is announced by a new origin ASN not seen in the baseline window
|
||||
|
||||
Use for:
|
||||
|
||||
- hijack suspicion
|
||||
- origin drift detection
|
||||
|
||||
### 2. More-Specific Burst
|
||||
|
||||
Trigger when:
|
||||
|
||||
- a more-specific prefix appears suddenly
|
||||
- especially from an unexpected origin ASN
|
||||
|
||||
Use for:
|
||||
|
||||
- subprefix hijack suspicion
|
||||
|
||||
### 3. Mass Withdrawal
|
||||
|
||||
Trigger when:
|
||||
|
||||
- the same prefix or ASN sees many withdrawals across collectors within a short window
|
||||
|
||||
Use for:
|
||||
|
||||
- outage suspicion
|
||||
- regional incident detection
|
||||
|
||||
### 4. Path Deviation
|
||||
|
||||
Trigger when:
|
||||
|
||||
- AS path length jumps sharply
|
||||
- or a rarely seen transit ASN appears
|
||||
- or path frequency drops below baseline norms
|
||||
|
||||
Use for:
|
||||
|
||||
- route leak suspicion
|
||||
- unusual path diversion
|
||||
|
||||
### 5. Visibility Drop
|
||||
|
||||
Trigger when:
|
||||
|
||||
- a prefix is visible from far fewer collectors/peers than its baseline
|
||||
|
||||
Use for:
|
||||
|
||||
- regional reachability degradation
|
||||
|
||||
## Baseline Strategy
|
||||
|
||||
Use BGPStream historical data to build:
|
||||
|
||||
- common origin ASN per prefix
|
||||
- common AS path patterns
|
||||
- collector visibility distribution
|
||||
- normal withdrawal frequency
|
||||
|
||||
Recommended baseline windows:
|
||||
|
||||
- short baseline: last 24 hours
|
||||
- medium baseline: last 7 days
|
||||
- long baseline: last 30 days
|
||||
|
||||
The first implementation can start with only the 7-day baseline.
|
||||
|
||||
## API Design
|
||||
|
||||
### Raw event API
|
||||
|
||||
Add endpoints like:
|
||||
|
||||
- `GET /api/v1/bgp/events`
|
||||
- `GET /api/v1/bgp/events/{id}`
|
||||
|
||||
Suggested filters:
|
||||
|
||||
- `prefix`
|
||||
- `origin_asn`
|
||||
- `peer_asn`
|
||||
- `collector`
|
||||
- `event_type`
|
||||
- `time_from`
|
||||
- `time_to`
|
||||
- `source`
|
||||
|
||||
### Anomaly API
|
||||
|
||||
Add endpoints like:
|
||||
|
||||
- `GET /api/v1/bgp/anomalies`
|
||||
- `GET /api/v1/bgp/anomalies/{id}`
|
||||
- `GET /api/v1/bgp/anomalies/summary`
|
||||
|
||||
Suggested filters:
|
||||
|
||||
- `severity`
|
||||
- `anomaly_type`
|
||||
- `status`
|
||||
- `prefix`
|
||||
- `origin_asn`
|
||||
- `time_from`
|
||||
- `time_to`
|
||||
|
||||
### Visualization API
|
||||
|
||||
Add an Earth-oriented endpoint like:
|
||||
|
||||
- `GET /api/v1/visualization/geo/bgp-anomalies`
|
||||
|
||||
Recommended feature shapes:
|
||||
|
||||
- point: collector locations
|
||||
- arc: inferred propagation or suspicious path edge
|
||||
- pulse point: active anomaly hotspot
|
||||
|
||||
## Earth Big-Screen Design
|
||||
|
||||
Recommended layers:
|
||||
|
||||
### Layer 1: Collector layer
|
||||
|
||||
Show known collector locations and current activity intensity.
|
||||
|
||||
### Layer 2: Route propagation arcs
|
||||
|
||||
Use arcs for:
|
||||
|
||||
- origin ASN country to collector country
|
||||
- or collector-to-collector visibility edges
|
||||
|
||||
Important note:
|
||||
|
||||
This is an inferred propagation view, not real packet flow.
|
||||
|
||||
### Layer 3: Active anomaly overlay
|
||||
|
||||
Show:
|
||||
|
||||
- hijack suspicion in red
|
||||
- mass withdrawal in orange
|
||||
- visibility drop in yellow
|
||||
- path deviation in blue
|
||||
|
||||
### Layer 4: Time playback
|
||||
|
||||
Use `data_snapshots` to replay:
|
||||
|
||||
- minute-by-minute route changes
|
||||
- anomaly expansion
|
||||
- recovery timeline
|
||||
|
||||
## Alerting Strategy
|
||||
|
||||
Map anomaly severity to the current alert system.
|
||||
|
||||
Recommended severity mapping:
|
||||
|
||||
- `critical`
|
||||
  - likely hijack
|
||||
  - very large withdrawal burst
|
||||
- `high`
|
||||
  - clear origin change
|
||||
  - large visibility drop
|
||||
- `medium`
|
||||
  - unusual path change
|
||||
  - moderate more-specific burst
|
||||
- `low`
|
||||
  - weak or localized anomalies
|
||||
|
||||
## Delivery Plan
|
||||
|
||||
### Phase 1
|
||||
|
||||
- add `RISLiveCollector`
|
||||
- normalize updates into `collected_data`
|
||||
- create `bgp_anomalies`
|
||||
- implement 3 rules:
|
||||
- origin change
|
||||
- more-specific burst
|
||||
- mass withdrawal
|
||||
|
||||
### Phase 2
|
||||
|
||||
- add `BGPStreamBackfillCollector`
|
||||
- build 7-day baseline
|
||||
- implement:
|
||||
- path deviation
|
||||
- visibility drop
|
||||
|
||||
### Phase 3
|
||||
|
||||
- add Earth visualization layer
|
||||
- add time playback
|
||||
- add anomaly filtering and drilldown
|
||||
|
||||
## Practical Implementation Notes
|
||||
|
||||
- Start with IPv4 first, then add IPv6 after the event schema is stable.
|
||||
- Store the original raw payload in `metadata.raw_message` for traceability.
|
||||
- Deduplicate events by a stable hash of collector, peer, prefix, type, and timestamp.
|
||||
- Keep anomaly generation idempotent so replay and backfill do not create duplicate alerts.
|
||||
- Expect noisy data and partial views; confidence scoring matters.
|
||||
|
||||
## Recommended First Patch Set
|
||||
|
||||
The first code milestone should include:
|
||||
|
||||
1. `backend/app/services/collectors/ris_live.py`
|
||||
2. `backend/app/services/collectors/bgpstream.py`
|
||||
3. `backend/app/models/bgp_anomaly.py`
|
||||
4. `backend/app/api/v1/bgp.py`
|
||||
5. `backend/app/api/v1/visualization.py`
|
||||
   add BGP anomaly geo endpoint
|
||||
6. `frontend/src/pages`
|
||||
   add a BGP anomaly list or summary page
|
||||
7. `frontend/public/earth/js`
|
||||
   add BGP anomaly rendering layer
|
||||
|
||||
## Sources
|
||||
|
||||
- [RIPE RIS Live](https://ris-live.ripe.net/)
|
||||
- [CAIDA BGPStream Data Access Overview](https://bgpstream.caida.org/docs/overview/data-access)
|
||||
207
docs/collected-data-column-removal-plan.md
Normal file
@@ -0,0 +1,207 @@
|
||||
# collected_data 强耦合列拆除计划
|
||||
|
||||
## 背景
|
||||
|
||||
当前 `collected_data` 同时承担了两类职责:
|
||||
|
||||
1. 通用采集事实表
|
||||
2. 少数数据源的宽表字段承载
|
||||
|
||||
典型强耦合列包括:
|
||||
|
||||
- `country`
|
||||
- `city`
|
||||
- `latitude`
|
||||
- `longitude`
|
||||
- `value`
|
||||
- `unit`
|
||||
|
||||
以及 API 层临时平铺出来的:
|
||||
|
||||
- `cores`
|
||||
- `rmax`
|
||||
- `rpeak`
|
||||
- `power`
|
||||
|
||||
这些字段并不适合作为统一事实表的长期 schema。
|
||||
推荐方向是:
|
||||
|
||||
- 表内保留通用稳定字段
|
||||
- 业务差异字段全部归入 `metadata`
|
||||
- API 和前端动态读取 `metadata`
|
||||
|
||||
## 拆除目标
|
||||
|
||||
最终希望 `collected_data` 只保留:
|
||||
|
||||
- `id`
|
||||
- `snapshot_id`
|
||||
- `task_id`
|
||||
- `source`
|
||||
- `source_id`
|
||||
- `entity_key`
|
||||
- `data_type`
|
||||
- `name`
|
||||
- `title`
|
||||
- `description`
|
||||
- `metadata`
|
||||
- `collected_at`
|
||||
- `reference_date`
|
||||
- `is_valid`
|
||||
- `is_current`
|
||||
- `previous_record_id`
|
||||
- `change_type`
|
||||
- `change_summary`
|
||||
- `deleted_at`
|
||||
|
||||
## 计划阶段
|
||||
|
||||
### Phase 1:读取层去依赖
|
||||
|
||||
目标:
|
||||
|
||||
- API / 可视化 / 前端不再优先依赖宽列表字段
|
||||
- 所有动态字段优先从 `metadata` 取
|
||||
|
||||
当前已完成:
|
||||
|
||||
- 新写入数据时,将 `country/city/latitude/longitude/value/unit` 自动镜像到 `metadata`
|
||||
- `/api/v1/collected` 优先从 `metadata` 取动态字段
|
||||
- `visualization` 接口优先从 `metadata` 取动态字段
|
||||
- 国家筛选已改成只走 `metadata->>'country'`
|
||||
- `CollectedData.to_dict()` 已切到 metadata-first
|
||||
- 变更比较逻辑已切到 metadata-first
|
||||
- 已新增历史回填脚本:
|
||||
[scripts/backfill_collected_data_metadata.py](/home/ray/dev/linkong/planet/scripts/backfill_collected_data_metadata.py)
|
||||
- 已新增删列脚本:
|
||||
[scripts/drop_collected_data_legacy_columns.py](/home/ray/dev/linkong/planet/scripts/drop_collected_data_legacy_columns.py)
|
||||
|
||||
涉及文件:
|
||||
|
||||
- [backend/app/core/collected_data_fields.py](/home/ray/dev/linkong/planet/backend/app/core/collected_data_fields.py)
|
||||
- [backend/app/services/collectors/base.py](/home/ray/dev/linkong/planet/backend/app/services/collectors/base.py)
|
||||
- [backend/app/api/v1/collected_data.py](/home/ray/dev/linkong/planet/backend/app/api/v1/collected_data.py)
|
||||
- [backend/app/api/v1/visualization.py](/home/ray/dev/linkong/planet/backend/app/api/v1/visualization.py)
|
||||
|
||||
### Phase 2:写入层去依赖
|
||||
|
||||
目标:
|
||||
|
||||
- 采集器内部不再把这些字段当作数据库一级列来理解
|
||||
- 统一只写:
|
||||
- 通用主字段
|
||||
- `metadata`
|
||||
|
||||
建议动作:
|
||||
|
||||
1. Collector 内部仍可使用 `country/city/value` 这种临时字段作为采集过程变量
|
||||
2. 进入 `BaseCollector._save_data()` 后统一归档到 `metadata`
|
||||
3. `CollectedData` 模型中的强耦合列已从 ORM 移除,写入统一归档到 `metadata`
|
||||
|
||||
### Phase 3:数据库删列
|
||||
|
||||
目标:
|
||||
|
||||
- 从 `collected_data` 真正移除以下列:
|
||||
- `country`
|
||||
- `city`
|
||||
- `latitude`
|
||||
- `longitude`
|
||||
- `value`
|
||||
- `unit`
|
||||
|
||||
注意:
|
||||
|
||||
- `cores / rmax / rpeak / power` 当前本来就在 `metadata` 里,不是表列
|
||||
- 这四个主要是 API 平铺字段,不需要数据库删列
|
||||
|
||||
## 当前阻塞点
|
||||
|
||||
在正式删列前,还需要确认这些地方已经完全不再直接依赖数据库列:
|
||||
|
||||
### 1. `CollectedData.to_dict()`
|
||||
|
||||
文件:
|
||||
|
||||
- [backend/app/models/collected_data.py](/home/ray/dev/linkong/planet/backend/app/models/collected_data.py)
|
||||
|
||||
状态:
|
||||
|
||||
- 已完成
|
||||
|
||||
### 2. 差异计算逻辑
|
||||
|
||||
文件:
|
||||
|
||||
- [backend/app/services/collectors/base.py](/home/ray/dev/linkong/planet/backend/app/services/collectors/base.py)
|
||||
|
||||
状态:
|
||||
|
||||
- 已完成
|
||||
- 当前已改成比较归一化后的 metadata-first payload
|
||||
|
||||
### 3. 历史数据回填
|
||||
|
||||
问题:
|
||||
|
||||
- 老数据可能只有列值,没有对应 `metadata`
|
||||
|
||||
当前方案:
|
||||
|
||||
- 在删列前执行一次回填脚本:
|
||||
- [scripts/backfill_collected_data_metadata.py](/home/ray/dev/linkong/planet/scripts/backfill_collected_data_metadata.py)
|
||||
|
||||
### 4. 导出格式兼容
|
||||
|
||||
文件:
|
||||
|
||||
- [backend/app/api/v1/collected_data.py](/home/ray/dev/linkong/planet/backend/app/api/v1/collected_data.py)
|
||||
|
||||
现状:
|
||||
|
||||
- CSV/JSON 导出已基本切成 metadata-first
|
||||
|
||||
建议:
|
||||
|
||||
- 删列前再回归检查一次导出字段是否一致
|
||||
|
||||
## 推荐执行顺序
|
||||
|
||||
1. 保持新数据写入时 `metadata` 完整
|
||||
2. 把模型和 diff 逻辑完全切成 metadata-first
|
||||
3. 写一条历史回填脚本
|
||||
4. 回填后观察一轮
|
||||
5. 正式执行删列迁移
|
||||
|
||||
## 推荐迁移 SQL
|
||||
|
||||
仅在确认全部读取链路已去依赖后执行:
|
||||
|
||||
```sql
|
||||
ALTER TABLE collected_data
|
||||
DROP COLUMN IF EXISTS country,
|
||||
DROP COLUMN IF EXISTS city,
|
||||
DROP COLUMN IF EXISTS latitude,
|
||||
DROP COLUMN IF EXISTS longitude,
|
||||
DROP COLUMN IF EXISTS value,
|
||||
DROP COLUMN IF EXISTS unit;
|
||||
```
|
||||
|
||||
## 风险提示
|
||||
|
||||
1. 地图类接口对经纬度最敏感
|
||||
必须确保所有地图需要的记录,其 `metadata.latitude/longitude` 已回填完整。
|
||||
|
||||
2. 历史老数据如果没有回填,删列后会直接丢失这些信息。
|
||||
|
||||
3. 某些 collector 可能仍隐式依赖这些宽字段做差异比较,删列前必须做一次全量回归。
|
||||
|
||||
## 当前判断
|
||||
|
||||
当前项目已经完成“代码去依赖 + 历史回填 + readiness 检查”。
|
||||
下一步执行顺序建议固定为:
|
||||
|
||||
1. 先部署当前代码版本并重启后端
|
||||
2. 再做一轮功能回归
|
||||
3. 最后执行:
|
||||
`uv run python scripts/drop_collected_data_legacy_columns.py`
|
||||
402
docs/collected-data-history-plan.md
Normal file
@@ -0,0 +1,402 @@
|
||||
# 采集数据历史快照化改造方案
|
||||
|
||||
## 背景
|
||||
|
||||
当前系统的 `collected_data` 更接近“当前结果表”:
|
||||
|
||||
- 同一个 `source + source_id` 会被更新覆盖
|
||||
- 前端列表页默认读取这张表
|
||||
- `collection_tasks` 只记录任务执行状态,不直接承载数据版本语义
|
||||
|
||||
这套方式适合管理后台,但不利于后续做态势感知、时间回放、趋势分析和版本对比。
|
||||
如果后面需要回答下面这类问题,当前模型会比较吃力:
|
||||
|
||||
- 某条实体在过去 7 天如何变化
|
||||
- 某次采集相比上次新增了什么、删除了什么、值变了什么
|
||||
- 某个时刻地图上“当时的世界状态”是什么
|
||||
- 告警是在第几次采集后触发的
|
||||
|
||||
因此建议把采集数据改造成“历史快照 + 当前视图”模型。
|
||||
|
||||
## 目标
|
||||
|
||||
1. 每次触发采集都保留一份独立快照,历史可追溯。
|
||||
2. 管理后台默认仍然只看“当前最新状态”,不增加使用复杂度。
|
||||
3. 后续支持:
|
||||
- 时间线回放
|
||||
- 两次采集差异对比
|
||||
- 趋势分析
|
||||
- 按快照回溯告警和地图状态
|
||||
4. 尽量兼容现有接口,降低改造成本。
|
||||
|
||||
## 结论
|
||||
|
||||
不建议继续用以下两种单一模式:
|
||||
|
||||
- 直接覆盖旧数据
|
||||
问题:没有历史,无法回溯。
|
||||
|
||||
- 软删除旧数据再全量新增
|
||||
问题:语义不清,历史和“当前无效”混在一起,后续统计复杂。
|
||||
|
||||
推荐方案:
|
||||
|
||||
- 保留历史事实表
|
||||
- 维护当前视图
|
||||
- 每次采集对应一个明确的快照批次
|
||||
|
||||
## 推荐数据模型
|
||||
|
||||
### 方案概览
|
||||
|
||||
建议拆成三层:
|
||||
|
||||
1. `collection_tasks`
|
||||
继续作为采集任务表,表示“这次采集任务”。
|
||||
|
||||
2. `data_snapshots`
|
||||
新增快照表,表示“某个数据源在某次任务中产出的一个快照批次”。
|
||||
|
||||
3. `collected_data`
|
||||
从“当前结果表”升级为“历史事实表”,每一行归属于一个快照。
|
||||
|
||||
同时再提供一个“当前视图”:
|
||||
|
||||
- SQL View / 物化视图 / API 查询层封装均可
|
||||
- 语义是“每个 `source + source_id` 的最新有效记录”
|
||||
|
||||
### 新增表:`data_snapshots`
|
||||
|
||||
建议字段:
|
||||
|
||||
| 字段 | 类型 | 含义 |
|
||||
|---|---|---|
|
||||
| `id` | bigint PK | 快照主键 |
|
||||
| `datasource_id` | int | 对应数据源 |
|
||||
| `task_id` | int | 对应采集任务 |
|
||||
| `source` | varchar(100) | 数据源名,如 `top500` |
|
||||
| `snapshot_key` | varchar(100) | 可选,业务快照标识 |
|
||||
| `reference_date` | timestamptz nullable | 这批数据的参考时间 |
|
||||
| `started_at` | timestamptz | 快照开始时间 |
|
||||
| `completed_at` | timestamptz | 快照完成时间 |
|
||||
| `record_count` | int | 快照总记录数 |
|
||||
| `status` | varchar(20) | `running/success/failed/partial` |
|
||||
| `is_current` | bool | 当前是否是该数据源最新快照 |
|
||||
| `parent_snapshot_id` | bigint nullable | 上一版快照,可用于 diff |
|
||||
| `summary` | jsonb | 本次快照统计摘要 |
|
||||
|
||||
说明:
|
||||
|
||||
- `collection_tasks` 偏“执行过程”
|
||||
- `data_snapshots` 偏“数据版本”
|
||||
- 一个任务通常对应一个快照,但保留分层更清晰
|
||||
|
||||
### 升级表:`collected_data`
|
||||
|
||||
建议新增字段:
|
||||
|
||||
| 字段 | 类型 | 含义 |
|
||||
|---|---|---|
|
||||
| `snapshot_id` | bigint not null | 归属快照 |
|
||||
| `task_id` | int nullable | 归属任务,便于追查 |
|
||||
| `entity_key` | varchar(255) | 实体稳定键,通常可由 `source + source_id` 派生 |
|
||||
| `is_current` | bool | 当前是否为该实体最新记录 |
|
||||
| `previous_record_id` | bigint nullable | 上一个版本的记录 |
|
||||
| `change_type` | varchar(20) | `created/updated/unchanged/deleted` |
|
||||
| `change_summary` | jsonb | 字段变化摘要 |
|
||||
| `deleted_at` | timestamptz nullable | 对应“本次快照中消失”的实体 |
|
||||
|
||||
保留现有字段:
|
||||
|
||||
- `source`
|
||||
- `source_id`
|
||||
- `data_type`
|
||||
- `name`
|
||||
- `title`
|
||||
- `description`
|
||||
- `country`
|
||||
- `city`
|
||||
- `latitude`
|
||||
- `longitude`
|
||||
- `value`
|
||||
- `unit`
|
||||
- `metadata`
|
||||
- `collected_at`
|
||||
- `reference_date`
|
||||
- `is_valid`
|
||||
|
||||
### 当前视图
|
||||
|
||||
建议新增一个只读视图:
|
||||
|
||||
`current_collected_data`
|
||||
|
||||
语义:
|
||||
|
||||
- 对每个 `source + source_id` 只保留最新一条 `is_current = true` 且 `deleted_at is null` 的记录
|
||||
|
||||
这样:
|
||||
|
||||
- 管理后台继续像现在一样查“当前数据”
|
||||
- 历史分析查 `collected_data`
|
||||
|
||||
## 写入策略
|
||||
|
||||
### 触发按钮语义
|
||||
|
||||
“触发”不再理解为“覆盖旧表”,而是:
|
||||
|
||||
- 启动一次新的采集任务
|
||||
- 生成一个新的快照
|
||||
- 将本次结果写入历史事实表
|
||||
- 再更新当前视图标记
|
||||
|
||||
### 写入流程
|
||||
|
||||
1. 创建 `collection_tasks` 记录,状态 `running`
|
||||
2. 创建 `data_snapshots` 记录,状态 `running`
|
||||
3. 采集器拉取原始数据并标准化
|
||||
4. 为每条记录生成 `entity_key`
|
||||
- 推荐:`{source}:{source_id}`
|
||||
5. 将本次记录批量写入 `collected_data`
|
||||
6. 与上一个快照做比对,计算:
|
||||
- 新增
|
||||
- 更新
|
||||
- 未变
|
||||
- 删除
|
||||
7. 更新本批记录的:
|
||||
- `change_type`
|
||||
- `previous_record_id`
|
||||
- `is_current`
|
||||
8. 将上一批同实体记录的 `is_current` 置为 `false`
|
||||
9. 将本次快照未出现但上一版存在的实体标记为 `deleted`
|
||||
10. 更新 `data_snapshots.status = success`
|
||||
11. 更新 `collection_tasks.status = success`
|
||||
|
||||
### 删除语义
|
||||
|
||||
这里不建议真的删记录。
|
||||
建议采用“逻辑消失”模型:
|
||||
|
||||
- 历史行永远保留
|
||||
- 如果某实体在新快照里消失:
|
||||
- 上一条历史记录补一条“删除状态记录”或标记 `change_type = deleted`
|
||||
- 同时该实体不再出现在当前视图
|
||||
|
||||
这样最适合态势感知。
|
||||
|
||||
## API 改造建议
|
||||
|
||||
### 保持现有接口默认行为
|
||||
|
||||
现有接口:
|
||||
|
||||
- `GET /api/v1/collected`
|
||||
- `GET /api/v1/collected/{id}`
|
||||
- `GET /api/v1/collected/summary`
|
||||
|
||||
建议默认仍返回“当前视图”,避免前端全面重写。
|
||||
|
||||
### 新增历史查询能力
|
||||
|
||||
建议新增参数或新接口:
|
||||
|
||||
#### 1. 当前/历史切换
|
||||
|
||||
`GET /api/v1/collected?mode=current|history`
|
||||
|
||||
- `current`:默认,查当前视图
|
||||
- `history`:查历史事实表
|
||||
|
||||
#### 2. 按快照查询
|
||||
|
||||
`GET /api/v1/collected?snapshot_id=123`
|
||||
|
||||
#### 3. 快照列表
|
||||
|
||||
`GET /api/v1/snapshots`
|
||||
|
||||
支持筛选:
|
||||
|
||||
- `datasource_id`
|
||||
- `source`
|
||||
- `status`
|
||||
- `date_from/date_to`
|
||||
|
||||
#### 4. 快照详情
|
||||
|
||||
`GET /api/v1/snapshots/{id}`
|
||||
|
||||
返回:
|
||||
|
||||
- 快照基础信息
|
||||
- 统计摘要
|
||||
- 与上一版的 diff 摘要
|
||||
|
||||
#### 5. 快照 diff
|
||||
|
||||
`GET /api/v1/snapshots/{id}/diff?base_snapshot_id=122`
|
||||
|
||||
返回:
|
||||
|
||||
- `created`
|
||||
- `updated`
|
||||
- `deleted`
|
||||
- `unchanged`
|
||||
|
||||
## 前端改造建议
|
||||
|
||||
### 1. 数据列表页
|
||||
|
||||
默认仍看当前数据,不改用户使用习惯。
|
||||
|
||||
建议新增:
|
||||
|
||||
- “视图模式”
|
||||
- 当前数据
|
||||
- 历史数据
|
||||
- “快照时间”筛选
|
||||
- “只看变化项”筛选
|
||||
|
||||
### 2. 数据详情页
|
||||
|
||||
详情页建议展示:
|
||||
|
||||
- 当前记录基础信息
|
||||
- 元数据动态字段
|
||||
- 所属快照
|
||||
- 上一版本对比入口
|
||||
- 历史版本时间线
|
||||
|
||||
### 3. 数据源管理页
|
||||
|
||||
“触发”按钮文案建议改成更准确的:
|
||||
|
||||
- `立即采集`
|
||||
|
||||
并在详情里补:
|
||||
|
||||
- 最近一次快照时间
|
||||
- 最近一次快照记录数
|
||||
- 最近一次变化数
|
||||
|
||||
## 迁移方案
|
||||
|
||||
### Phase 1:兼容式落地
|
||||
|
||||
目标:先保留当前页面可用。
|
||||
|
||||
改动:
|
||||
|
||||
1. 新增 `data_snapshots`
|
||||
2. 给 `collected_data` 增加:
|
||||
- `snapshot_id`
|
||||
- `task_id`
|
||||
- `entity_key`
|
||||
- `is_current`
|
||||
- `previous_record_id`
|
||||
- `change_type`
|
||||
- `change_summary`
|
||||
- `deleted_at`
|
||||
3. 现有数据全部补成一个“初始化快照”
|
||||
4. 现有 `/collected` 默认改查当前视图
|
||||
|
||||
优点:
|
||||
|
||||
- 前端几乎无感
|
||||
- 风险最小
|
||||
|
||||
### Phase 2:启用差异计算
|
||||
|
||||
目标:采集后可知道本次改了什么。
|
||||
|
||||
改动:
|
||||
|
||||
1. 写入时做新旧快照比对
|
||||
2. 写 `change_type`
|
||||
3. 生成快照摘要
|
||||
|
||||
### Phase 3:前端态势感知能力
|
||||
|
||||
目标:支持历史回放和趋势分析。
|
||||
|
||||
改动:
|
||||
|
||||
1. 快照时间线
|
||||
2. 版本 diff 页面
|
||||
3. 地图时间回放
|
||||
4. 告警和快照关联
|
||||
|
||||
## 唯一性与索引建议
|
||||
|
||||
### 建议保留的业务唯一性
|
||||
|
||||
在“同一个快照内部”,建议唯一:
|
||||
|
||||
- `(snapshot_id, source, source_id)`
|
||||
|
||||
不要在整张历史表上强加:
|
||||
|
||||
- `(source, source_id)` 唯一
|
||||
|
||||
因为历史表本来就应该允许同一实体跨快照存在多条版本。
|
||||
|
||||
### 建议索引
|
||||
|
||||
- `idx_collected_data_snapshot_id`
|
||||
- `idx_collected_data_source_source_id`
|
||||
- `idx_collected_data_entity_key`
|
||||
- `idx_collected_data_is_current`
|
||||
- `idx_collected_data_reference_date`
|
||||
- `idx_snapshots_source_completed_at`
|
||||
|
||||
## 风险点
|
||||
|
||||
1. 存储量会明显增加
|
||||
- 需要评估保留周期
|
||||
- 可以考虑冷热分层
|
||||
|
||||
2. 写入复杂度上升
|
||||
- 需要批量 upsert / diff 逻辑
|
||||
|
||||
3. 当前接口语义会从“表”变成“视图”
|
||||
- 文档必须同步
|
||||
|
||||
4. 某些采集器缺稳定 `source_id`
|
||||
- 需要补齐实体稳定键策略
|
||||
|
||||
## 对当前项目的具体建议
|
||||
|
||||
结合当前代码,推荐这样落地:
|
||||
|
||||
### 短期
|
||||
|
||||
1. 先设计并落表:
|
||||
- `data_snapshots`
|
||||
- `collected_data` 新字段
|
||||
2. 采集完成后每次新增快照
|
||||
3. `/api/v1/collected` 默认查 `is_current = true`
|
||||
|
||||
### 中期
|
||||
|
||||
1. 在 `BaseCollector._save_data()` 中改成:
|
||||
- 生成快照
|
||||
- 批量写历史
|
||||
- 标记当前
|
||||
2. 将 `CollectionTask.id` 关联到 `snapshot.task_id`
|
||||
|
||||
### 长期
|
||||
|
||||
1. 地图接口支持按 `snapshot_id` 查询
|
||||
2. 仪表盘支持“最近一次快照变化量”
|
||||
3. 告警支持绑定到快照版本
|
||||
|
||||
## 最终建议
|
||||
|
||||
最终建议采用:
|
||||
|
||||
- 历史事实表:保存每次采集结果
|
||||
- 当前视图:服务管理后台默认查询
|
||||
- 快照表:承载版本批次和 diff 语义
|
||||
|
||||
这样既能保留历史,又不会把当前页面全部推翻重做,是最适合后续做态势感知的一条路径。
|
||||
210
docs/earth-module-plan.md
Normal file
@@ -0,0 +1,210 @@
|
||||
# Earth 模块整治计划
|
||||
|
||||
## 背景
|
||||
|
||||
`planet` 前端中的 Earth 模块是当前最重要的大屏 3D 星球展示能力,但它仍以 legacy iframe 页面形式存在:
|
||||
|
||||
- React 页面入口仅为 [Earth.tsx](/home/ray/dev/linkong/planet/frontend/src/pages/Earth/Earth.tsx)
|
||||
- 实际 3D 实现位于 [frontend/public/earth](/home/ray/dev/linkong/planet/frontend/public/earth)
|
||||
|
||||
当前模块已经具备基础展示能力,但在生命周期、性能、可恢复性、可维护性方面存在明显隐患,不适合长期无人值守的大屏场景直接扩展。
|
||||
|
||||
## 目标
|
||||
|
||||
本计划的目标不是立刻重写 Earth,而是分阶段把它从“能跑的 legacy 展示页”提升到“可稳定运行、可持续演进的大屏核心模块”。
|
||||
|
||||
核心目标:
|
||||
|
||||
1. 先止血,解决资源泄漏、重载污染、假性卡顿等稳定性问题
|
||||
2. 再梳理数据加载、交互和渲染循环,降低性能风险
|
||||
3. 最后逐步从 iframe legacy 向可控模块化架构迁移
|
||||
|
||||
## 现阶段主要问题
|
||||
|
||||
### 1. 生命周期缺失
|
||||
|
||||
- 没有统一 `destroy()` / 卸载清理逻辑
|
||||
- `requestAnimationFrame`
|
||||
- `window/document/dom listeners`
|
||||
- `THREE` geometry / material / texture
|
||||
- 运行时全局状态
|
||||
都没有系统回收
|
||||
|
||||
### 2. 数据重载不完整
|
||||
|
||||
- `reloadData()` 没有彻底清理旧场景对象
|
||||
- cable、landing point、satellite 相关缓存与对象存在累积风险
|
||||
|
||||
### 3. 渲染与命中检测成本高
|
||||
|
||||
- 鼠标移动时频繁创建 `Raycaster` / `Vector2`
|
||||
- cable 命中前会重复做 bounding box 计算
|
||||
- 卫星每帧计算量偏高
|
||||
|
||||
### 4. 状态管理分裂
|
||||
|
||||
- 大量依赖 `window.*` 全局桥接
|
||||
- 模块之间靠隐式共享状态通信
|
||||
- React 外层无法有效感知 Earth 内部状态
|
||||
|
||||
### 5. 错误恢复弱
|
||||
|
||||
- 数据加载失败主要依赖 `console` 和轻提示
|
||||
- 缺少统一重试、降级、局部失败隔离机制
|
||||
|
||||
## 分阶段计划
|
||||
|
||||
## Phase 1:稳定性止血
|
||||
|
||||
目标:
|
||||
|
||||
- 不改视觉主形态
|
||||
- 优先解决泄漏、卡死、重载污染
|
||||
|
||||
### 任务
|
||||
|
||||
1. 补 Earth 生命周期管理
|
||||
|
||||
- 为 [main.js](/home/ray/dev/linkong/planet/frontend/public/earth/js/main.js) 增加:
|
||||
- `init()`
|
||||
- `destroy()`
|
||||
- `reloadData()`
|
||||
三类明确入口
|
||||
- 统一记录并释放:
|
||||
- animation frame id
|
||||
- interval / timeout
|
||||
- DOM 事件监听
|
||||
- `window` 暴露对象
|
||||
|
||||
2. 增加场景对象清理层
|
||||
|
||||
- 为 cable / landing point / satellite sprite / orbit line 提供统一清理函数
|
||||
- reload 前先 dispose 旧对象,再重新加载
|
||||
|
||||
3. 增加 stale 状态恢复
|
||||
|
||||
- 页面重新进入时,先清理上一次遗留选择态、hover 态、锁定态
|
||||
- 避免 iframe reload 后出现旧状态残留
|
||||
|
||||
4. 加强失败提示
|
||||
|
||||
- 电缆、登陆点、卫星加载拆分为独立状态
|
||||
- 某一类数据失败时,其它类型仍可继续显示
|
||||
- 提供明确的页面内提示而不是只打 console
|
||||
|
||||
### 验收标准
|
||||
|
||||
- 页面重复进入 / 离开后内存不持续上涨
|
||||
- 连续多次点“重新加载数据”后对象数量不异常增加
|
||||
- 单一数据源加载失败时页面不整体失效
|
||||
|
||||
## Phase 2:性能优化
|
||||
|
||||
目标:
|
||||
|
||||
- 控制鼠标交互和动画循环成本
|
||||
- 提升大屏长时间运行的稳定帧率
|
||||
|
||||
### 任务
|
||||
|
||||
1. 复用交互对象
|
||||
|
||||
- 复用 `Raycaster`、`Vector2`、中间 `Vector3`
|
||||
- 避免 `mousemove` 热路径中频繁 new 对象
|
||||
|
||||
2. 优化 cable 命中逻辑
|
||||
|
||||
- 提前缓存 cable 中心点 / bounding 数据
|
||||
- 移除 `mousemove` 内重复 `computeBoundingBox()`
|
||||
- 必要时增加分层命中:
|
||||
- 先粗筛
|
||||
- 再精确相交
|
||||
|
||||
3. 改造动画循环
|
||||
|
||||
- 使用真实 `deltaTime`
|
||||
- 把卫星位置更新、呼吸动画、视觉状态更新拆成独立阶段
|
||||
- 为不可见对象减少无意义更新
|
||||
|
||||
4. 卫星轨迹与预测轨道优化
|
||||
|
||||
- 评估轨迹更新频率
|
||||
- 对高开销几何计算增加缓存
|
||||
- 限制预测轨道生成频次
|
||||
|
||||
### 验收标准
|
||||
|
||||
- 鼠标移动时不明显掉帧
|
||||
- 中高数据量下动画速度不受帧率明显影响
|
||||
- 长时间运行 CPU/GPU 占用更平稳
|
||||
|
||||
## Phase 3:架构收编
|
||||
|
||||
目标:
|
||||
|
||||
- 降低 legacy iframe 架构带来的维护成本
|
||||
- 让 React 主应用重新获得对 Earth 模块的控制力
|
||||
|
||||
### 任务
|
||||
|
||||
1. 抽离 Earth App Shell
|
||||
|
||||
- 将数据加载、错误状态、控制面板状态抽到更明确的模块边界
|
||||
- 减少 `window.*` 全局依赖
|
||||
|
||||
2. 规范模块通信
|
||||
|
||||
- 统一 `main / controls / cables / satellites / ui` 的状态流
|
||||
- 明确只读配置、运行时状态、渲染对象的职责分层
|
||||
|
||||
3. 评估去 iframe 迁移
|
||||
|
||||
- 中期可以保留 public/legacy 资源目录
|
||||
- 但逐步把 Earth 作为前端内嵌模块而不是完全孤立页面
|
||||
|
||||
### 验收标准
|
||||
|
||||
- Earth 内部状态不再大量依赖全局变量
|
||||
- React 外层可以感知 Earth 加载状态和错误状态
|
||||
- 后续功能开发不再必须修改多个 legacy 文件才能完成
|
||||
|
||||
## 优先级建议
|
||||
|
||||
### P0
|
||||
|
||||
- 生命周期清理
|
||||
- reload 清理
|
||||
- stale 状态恢复
|
||||
|
||||
### P1
|
||||
|
||||
- 命中检测优化
|
||||
- 动画 `deltaTime`
|
||||
- 数据加载失败隔离
|
||||
|
||||
### P2
|
||||
|
||||
- 全局状态收编
|
||||
- iframe 架构迁移
|
||||
|
||||
## 推荐实施顺序
|
||||
|
||||
1. 先做 Phase 1
|
||||
2. 再做交互热路径与动画循环优化
|
||||
3. 最后再考虑架构迁移
|
||||
|
||||
## 风险提示
|
||||
|
||||
1. Earth 是 legacy 模块,修复时容易牵一发而动全身
|
||||
2. 如果不先补清理逻辑,后续所有性能优化收益都会被泄漏问题吃掉
|
||||
3. 如果过早重写而不先止血,短期会影响现有演示稳定性
|
||||
|
||||
## 当前建议
|
||||
|
||||
最值得马上启动的是一个小范围稳定性 sprint:
|
||||
|
||||
- 生命周期清理
|
||||
- reload 全量清理
|
||||
- 错误状态隔离
|
||||
|
||||
这个阶段不追求“更炫”,先追求“更稳”。稳定下来之后,再进入性能和架构层的优化。
|
||||
48
docs/system-settings-plan.md
Normal file
@@ -0,0 +1,48 @@
|
||||
# 系统配置中心开发计划
|
||||
|
||||
## 目标
|
||||
|
||||
将当前仅保存于内存中的“系统配置”页面升级为真正可用的配置中心,优先服务以下两类能力:
|
||||
|
||||
1. 系统级配置持久化
|
||||
2. 采集调度配置管理
|
||||
|
||||
## 第一阶段范围
|
||||
|
||||
### 1. 系统配置持久化
|
||||
|
||||
- 新增 `system_settings` 表,用于保存分类配置
|
||||
- 将系统、通知、安全配置从进程内存迁移到数据库
|
||||
- 提供统一读取接口,页面刷新和服务重启后保持不丢失
|
||||
|
||||
### 2. 采集调度配置接入真实数据源
|
||||
|
||||
- 统一内置采集器默认定义
|
||||
- 启动时自动初始化 `data_sources` 表
|
||||
- 配置页允许修改:
|
||||
- 是否启用
|
||||
- 采集频率(分钟)
|
||||
- 优先级
|
||||
- 修改后实时同步到调度器
|
||||
|
||||
### 3. 前端配置页重构
|
||||
|
||||
- 将当前通用模板页调整为项目专用配置中心
|
||||
- 增加“采集调度”Tab
|
||||
- 保留“系统显示 / 通知 / 安全”三类配置
|
||||
- 将设置页正式接入主路由
|
||||
|
||||
## 非本阶段内容
|
||||
|
||||
- 邮件发送能力本身
|
||||
- 配置审计历史
|
||||
- 敏感凭证加密管理
|
||||
- 多租户或按角色细粒度配置
|
||||
|
||||
## 验收标准
|
||||
|
||||
- 设置项修改后重启服务仍然存在
|
||||
- 配置页可以查看并修改所有内置采集器的启停与采集频率
|
||||
- 调整采集频率后,调度器任务随之更新
|
||||
- `/settings` 页面可从主导航进入并正常工作
|
||||
|
||||
84
docs/version-history.md
Normal file
@@ -0,0 +1,84 @@
|
||||
# Version History
|
||||
|
||||
## Rules
|
||||
|
||||
- 初始版本从 `0.0.1-beta` 开始
|
||||
- 每次 `bugfix` 递增 `0.0.1`
|
||||
- 每次 `feature` 递增 `0.1.0`
|
||||
- `refactor / docs / maintenance` 默认不单独 bump 版本
|
||||
|
||||
## Assumptions
|
||||
|
||||
- 本文基于 `main` 与 `dev` 的非 merge commit 历史整理
|
||||
- 对于既包含修复又明显引入新能力的提交,按 `feature` 处理
|
||||
- `main` 表示已进入主线,`dev` 表示当前仍在开发分支上的增量
|
||||
|
||||
## Current Version
|
||||
|
||||
- `main` 当前主线历史推导到:`0.16.5`
|
||||
- `dev` 当前开发分支历史推导到:`0.21.6`
|
||||
|
||||
## Timeline
|
||||
|
||||
| Version | Type | Branch | Commit | Summary |
|
||||
| --- | --- | --- | --- | --- |
|
||||
| `0.0.1-beta` | bootstrap | `main` | `e7033775` | first commit |
|
||||
| `0.1.0` | feature | `main` | `6cb4398f` | Modularize 3D Earth page with ES Modules |
|
||||
| `0.2.0` | feature | `main` | `aaae6a53` | Add cable graph service and data collectors |
|
||||
| `0.2.1` | bugfix | `main` | `ceb1b728` | highlight all cable segments by cable_id |
|
||||
| `0.3.0` | feature | `main` | `14d11cd9` | add ArcGIS landing points and cable-landing relation collectors |
|
||||
| `0.4.0` | feature | `main` | `99771a88` | make ArcGIS data source URLs configurable |
|
||||
| `0.5.0` | feature | `main` | `de325521` | add data sources config system and Earth API integration |
|
||||
| `0.5.1` | bugfix | `main` | `b06cb460` | remove ignored files from tracking |
|
||||
| `0.5.2` | bugfix | `main` | `948af2c8` | fix coordinates-display position |
|
||||
| `0.6.0` | feature | `main` | `4e487b31` | upload new geo json |
|
||||
| `0.6.1` | bugfix | `main` | `02991730` | add cable_id to API response for cable highlighting |
|
||||
| `0.6.2` | bugfix | `main` | `c82e1d5a` | 修复 3D 地球坐标映射多个严重 bug |
|
||||
| `0.7.0` | feature | `main` | `3b0e9dec` | 统一卫星和线缆锁定逻辑,使用 lockedObject 系统 |
|
||||
| `0.7.1` | bugfix | `main` | `11a9dda9` | 修复 resetView 调用并统一启动脚本到 `planet.sh` |
|
||||
| `0.7.2` | bugfix | `main` | `e21b783b` | 修复 ArcGIS landing GeoJSON 坐标解析错误 |
|
||||
| `0.8.0` | feature | `main` | `f5083071` | 自动旋转按钮改为播放/暂停图标状态 |
|
||||
| `0.8.1` | bugfix | `main` | `777891f8` | 修复 resetView 视角和离开地球隐藏 tooltip |
|
||||
| `0.9.0` | feature | `main` | `1189fec0` | init view to China coordinates |
|
||||
| `0.10.0` | feature | `main` | `6fabbcfe` | request geolocation on resetView, fallback to China |
|
||||
| `0.11.0` | feature | `main` | `0ecc1bc5` | cable state management, hover/lock visual separation |
|
||||
| `0.12.0` | feature | `main` | `bb6b18fe` | satellite dot rendering with hover/lock rings |
|
||||
| `0.13.0` | feature | `main` | `3fcbae55` | add cable-landing point relation via `city_id` |
|
||||
| `0.14.0` | feature | `main` | `96222b9e` | toolbar layout and cable breathing effect improvements |
|
||||
| `0.15.0` | feature | `main` | `49a9c338` | toolbar and zoom improvements |
|
||||
| `0.16.0` | feature | `main` | `78bb639a` | toolbar zoom improvements and toggle-cables |
|
||||
| `0.16.1` | bugfix | `main` | `d9a64f77` | fix iframe scrollbar issue |
|
||||
| `0.16.2` | bugfix | `main` | `af29e90c` | prevent cable hover/click when cables are hidden |
|
||||
| `0.16.3` | bugfix | `main` | `eabdbdc8` | clear lock state when hiding satellites or cables |
|
||||
| `0.16.4` | bugfix | `main` | `0c950262` | fix satellite trail origin line and sync button state |
|
||||
| `0.16.5` | bugfix | `main` | `9d135bf2` | revert unstable toolbar change |
|
||||
| `0.16.6` | bugfix | `dev` | `465129ee` | use timestamp-based trail filtering to prevent flash |
|
||||
| `0.17.0` | feature | `dev` | `1784c057` | add predicted orbit display for locked satellites |
|
||||
| `0.17.1` | bugfix | `dev` | `543fe35f` | fix ring size attenuation and breathing animation |
|
||||
| `0.17.2` | bugfix | `dev` | `b9fbacad` | prevent selecting satellites on far side of earth |
|
||||
| `0.17.3` | bugfix | `dev` | `b57d69c9` | remove debug console.log for ring create/update |
|
||||
| `0.17.4` | bugfix | `dev` | `81a0ca5e` | fix back-facing detection with proper coordinate transform |
|
||||
| `0.18.0` | feature | `dev` | `ef0fefdf` | persist system settings and refine admin layouts |
|
||||
| `0.18.1` | bugfix | `dev` | `cc5f16f8` | fix settings layout and frontend startup checks |
|
||||
| `0.19.0` | feature | `dev` | `020c1d50` | refine data management and collection workflows |
|
||||
| `0.20.0` | feature | `dev` | `ce5feba3` | stabilize Earth module and fix satellite TLE handling |
|
||||
| `0.21.0` | feature | `dev` | `pending` | add Earth inertial drag, sync hover/trail state, and support unlimited satellite loading |
|
||||
| `0.21.1` | bugfix | `dev` | `pending` | polish Earth toolbar controls, icons, and loading copy |
|
||||
| `0.21.2` | bugfix | `dev` | `pending` | redesign Earth HUD with liquid-glass controls, dynamic legend switching, and info-card interaction polish |
|
||||
| `0.21.3` | bugfix | `dev` | `30a29a6e` | harden `planet.sh` startup controls, add selective restart and interactive user creation |
|
||||
| `0.21.4` | bugfix | `dev` | `7ec9586f` | add Earth HUD backup snapshots and icon assets |
|
||||
| `0.21.5` | bugfix | `dev` | `a761dfc5` | refine Earth legend item presentation |
|
||||
| `0.21.6` | bugfix | `dev` | `pending` | improve Earth legend generation, info-card interactions, and HUD messaging polish |
|
||||
|
||||
## Maintenance Commits Not Counted as Version Bumps
|
||||
|
||||
这些提交被视为维护性工作,因此未单独递增版本号:
|
||||
|
||||
- `3145ff08` Add `.gitignore` and clean
|
||||
- `4ada75ca` new branch
|
||||
- `c2eba54d` 整理资源文件,添加 legacy 路由
|
||||
- `82f7aa29` 提取地球坐标常量到 `EARTH_CONFIG`
|
||||
- `d18e400f` remove dead code
|
||||
- `869d661a` abstract cable highlight logic
|
||||
- `4f922f13` extract satellite config to `SATELLITE_CONFIG`
|
||||
- `3e3090d7` docs: add architecture refactor and webgl instancing plans
|
||||
477
frontend/bun.lock
Normal file
@@ -0,0 +1,477 @@
|
||||
{
|
||||
"lockfileVersion": 1,
|
||||
"configVersion": 0,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "planet-frontend",
|
||||
"dependencies": {
|
||||
"@ant-design/icons": "^5.2.6",
|
||||
"antd": "^5.12.5",
|
||||
"axios": "^1.6.2",
|
||||
"dayjs": "^1.11.10",
|
||||
"react": "^18.2.0",
|
||||
"react-dom": "^18.2.0",
|
||||
"react-resizable": "^3.1.3",
|
||||
"react-router-dom": "^6.21.0",
|
||||
"simplex-noise": "^4.0.1",
|
||||
"socket.io-client": "^4.7.2",
|
||||
"three": "^0.160.0",
|
||||
"zustand": "^4.4.7",
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/react": "^18.2.45",
|
||||
"@types/react-dom": "^18.2.18",
|
||||
"@vitejs/plugin-react": "^4.2.1",
|
||||
"typescript": "^5.3.3",
|
||||
"vite": "^5.0.10",
|
||||
},
|
||||
},
|
||||
},
|
||||
"packages": {
|
||||
"@ant-design/colors": ["@ant-design/colors@7.2.1", "", { "dependencies": { "@ant-design/fast-color": "^2.0.6" } }, "sha512-lCHDcEzieu4GA3n8ELeZ5VQ8pKQAWcGGLRTQ50aQM2iqPpq2evTxER84jfdPvsPAtEcZ7m44NI45edFMo8oOYQ=="],
|
||||
|
||||
"@ant-design/cssinjs": ["@ant-design/cssinjs@1.24.0", "", { "dependencies": { "@babel/runtime": "^7.11.1", "@emotion/hash": "^0.8.0", "@emotion/unitless": "^0.7.5", "classnames": "^2.3.1", "csstype": "^3.1.3", "rc-util": "^5.35.0", "stylis": "^4.3.4" }, "peerDependencies": { "react": ">=16.0.0", "react-dom": ">=16.0.0" } }, "sha512-K4cYrJBsgvL+IoozUXYjbT6LHHNt+19a9zkvpBPxLjFHas1UpPM2A5MlhROb0BT8N8WoavM5VsP9MeSeNK/3mg=="],
|
||||
|
||||
"@ant-design/cssinjs-utils": ["@ant-design/cssinjs-utils@1.1.3", "", { "dependencies": { "@ant-design/cssinjs": "^1.21.0", "@babel/runtime": "^7.23.2", "rc-util": "^5.38.0" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-nOoQMLW1l+xR1Co8NFVYiP8pZp3VjIIzqV6D6ShYF2ljtdwWJn5WSsH+7kvCktXL/yhEtWURKOfH5Xz/gzlwsg=="],
|
||||
|
||||
"@ant-design/fast-color": ["@ant-design/fast-color@2.0.6", "", { "dependencies": { "@babel/runtime": "^7.24.7" } }, "sha512-y2217gk4NqL35giHl72o6Zzqji9O7vHh9YmhUVkPtAOpoTCH4uWxo/pr4VE8t0+ChEPs0qo4eJRC5Q1eXWo3vA=="],
|
||||
|
||||
"@ant-design/icons": ["@ant-design/icons@5.6.1", "", { "dependencies": { "@ant-design/colors": "^7.0.0", "@ant-design/icons-svg": "^4.4.0", "@babel/runtime": "^7.24.8", "classnames": "^2.2.6", "rc-util": "^5.31.1" }, "peerDependencies": { "react": ">=16.0.0", "react-dom": ">=16.0.0" } }, "sha512-0/xS39c91WjPAZOWsvi1//zjx6kAp4kxWwctR6kuU6p133w8RU0D2dSCvZC19uQyharg/sAvYxGYWl01BbZZfg=="],
|
||||
|
||||
"@ant-design/icons-svg": ["@ant-design/icons-svg@4.4.2", "", {}, "sha512-vHbT+zJEVzllwP+CM+ul7reTEfBR0vgxFe7+lREAsAA7YGsYpboiq2sQNeQeRvh09GfQgs/GyFEvZpJ9cLXpXA=="],
|
||||
|
||||
"@ant-design/react-slick": ["@ant-design/react-slick@1.1.2", "", { "dependencies": { "@babel/runtime": "^7.10.4", "classnames": "^2.2.5", "json2mq": "^0.2.0", "resize-observer-polyfill": "^1.5.1", "throttle-debounce": "^5.0.0" }, "peerDependencies": { "react": ">=16.9.0" } }, "sha512-EzlvzE6xQUBrZuuhSAFTdsr4P2bBBHGZwKFemEfq8gIGyIQCxalYfZW/T2ORbtQx5rU69o+WycP3exY/7T1hGA=="],
|
||||
|
||||
"@babel/code-frame": ["@babel/code-frame@7.29.0", "", { "dependencies": { "@babel/helper-validator-identifier": "^7.28.5", "js-tokens": "^4.0.0", "picocolors": "^1.1.1" } }, "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw=="],
|
||||
|
||||
"@babel/compat-data": ["@babel/compat-data@7.29.0", "", {}, "sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg=="],
|
||||
|
||||
"@babel/core": ["@babel/core@7.29.0", "", { "dependencies": { "@babel/code-frame": "^7.29.0", "@babel/generator": "^7.29.0", "@babel/helper-compilation-targets": "^7.28.6", "@babel/helper-module-transforms": "^7.28.6", "@babel/helpers": "^7.28.6", "@babel/parser": "^7.29.0", "@babel/template": "^7.28.6", "@babel/traverse": "^7.29.0", "@babel/types": "^7.29.0", "@jridgewell/remapping": "^2.3.5", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.3", "semver": "^6.3.1" } }, "sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA=="],
|
||||
|
||||
"@babel/generator": ["@babel/generator@7.29.0", "", { "dependencies": { "@babel/parser": "^7.29.0", "@babel/types": "^7.29.0", "@jridgewell/gen-mapping": "^0.3.12", "@jridgewell/trace-mapping": "^0.3.28", "jsesc": "^3.0.2" } }, "sha512-vSH118/wwM/pLR38g/Sgk05sNtro6TlTJKuiMXDaZqPUfjTFcudpCOt00IhOfj+1BFAX+UFAlzCU+6WXr3GLFQ=="],
|
||||
|
||||
"@babel/helper-compilation-targets": ["@babel/helper-compilation-targets@7.28.6", "", { "dependencies": { "@babel/compat-data": "^7.28.6", "@babel/helper-validator-option": "^7.27.1", "browserslist": "^4.24.0", "lru-cache": "^5.1.1", "semver": "^6.3.1" } }, "sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA=="],
|
||||
|
||||
"@babel/helper-globals": ["@babel/helper-globals@7.28.0", "", {}, "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw=="],
|
||||
|
||||
"@babel/helper-module-imports": ["@babel/helper-module-imports@7.28.6", "", { "dependencies": { "@babel/traverse": "^7.28.6", "@babel/types": "^7.28.6" } }, "sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw=="],
|
||||
|
||||
"@babel/helper-module-transforms": ["@babel/helper-module-transforms@7.28.6", "", { "dependencies": { "@babel/helper-module-imports": "^7.28.6", "@babel/helper-validator-identifier": "^7.28.5", "@babel/traverse": "^7.28.6" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA=="],
|
||||
|
||||
"@babel/helper-plugin-utils": ["@babel/helper-plugin-utils@7.28.6", "", {}, "sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug=="],
|
||||
|
||||
"@babel/helper-string-parser": ["@babel/helper-string-parser@7.27.1", "", {}, "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA=="],
|
||||
|
||||
"@babel/helper-validator-identifier": ["@babel/helper-validator-identifier@7.28.5", "", {}, "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q=="],
|
||||
|
||||
"@babel/helper-validator-option": ["@babel/helper-validator-option@7.27.1", "", {}, "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg=="],
|
||||
|
||||
"@babel/helpers": ["@babel/helpers@7.28.6", "", { "dependencies": { "@babel/template": "^7.28.6", "@babel/types": "^7.28.6" } }, "sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw=="],
|
||||
|
||||
"@babel/parser": ["@babel/parser@7.29.0", "", { "dependencies": { "@babel/types": "^7.29.0" }, "bin": { "parser": "bin/babel-parser.js" } }, "sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww=="],
|
||||
|
||||
"@babel/plugin-transform-react-jsx-self": ["@babel/plugin-transform-react-jsx-self@7.27.1", "", { "dependencies": { "@babel/helper-plugin-utils": "^7.27.1" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw=="],
|
||||
|
||||
"@babel/plugin-transform-react-jsx-source": ["@babel/plugin-transform-react-jsx-source@7.27.1", "", { "dependencies": { "@babel/helper-plugin-utils": "^7.27.1" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw=="],
|
||||
|
||||
"@babel/runtime": ["@babel/runtime@7.28.6", "", {}, "sha512-05WQkdpL9COIMz4LjTxGpPNCdlpyimKppYNoJ5Di5EUObifl8t4tuLuUBBZEpoLYOmfvIWrsp9fCl0HoPRVTdA=="],
|
||||
|
||||
"@babel/template": ["@babel/template@7.28.6", "", { "dependencies": { "@babel/code-frame": "^7.28.6", "@babel/parser": "^7.28.6", "@babel/types": "^7.28.6" } }, "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ=="],
|
||||
|
||||
"@babel/traverse": ["@babel/traverse@7.29.0", "", { "dependencies": { "@babel/code-frame": "^7.29.0", "@babel/generator": "^7.29.0", "@babel/helper-globals": "^7.28.0", "@babel/parser": "^7.29.0", "@babel/template": "^7.28.6", "@babel/types": "^7.29.0", "debug": "^4.3.1" } }, "sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA=="],
|
||||
|
||||
"@babel/types": ["@babel/types@7.29.0", "", { "dependencies": { "@babel/helper-string-parser": "^7.27.1", "@babel/helper-validator-identifier": "^7.28.5" } }, "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A=="],
|
||||
|
||||
"@emotion/hash": ["@emotion/hash@0.8.0", "", {}, "sha512-kBJtf7PH6aWwZ6fka3zQ0p6SBYzx4fl1LoZXE2RrnYST9Xljm7WfKJrU4g/Xr3Beg72MLrp1AWNUmuYJTL7Cow=="],
|
||||
|
||||
"@emotion/unitless": ["@emotion/unitless@0.7.5", "", {}, "sha512-OWORNpfjMsSSUBVrRBVGECkhWcULOAJz9ZW8uK9qgxD+87M7jHRcvh/A96XXNhXTLmKcoYSQtBEX7lHMO7YRwg=="],
|
||||
|
||||
"@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.21.5", "", { "os": "aix", "cpu": "ppc64" }, "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ=="],
|
||||
|
||||
"@esbuild/android-arm": ["@esbuild/android-arm@0.21.5", "", { "os": "android", "cpu": "arm" }, "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg=="],
|
||||
|
||||
"@esbuild/android-arm64": ["@esbuild/android-arm64@0.21.5", "", { "os": "android", "cpu": "arm64" }, "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A=="],
|
||||
|
||||
"@esbuild/android-x64": ["@esbuild/android-x64@0.21.5", "", { "os": "android", "cpu": "x64" }, "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA=="],
|
||||
|
||||
"@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.21.5", "", { "os": "darwin", "cpu": "arm64" }, "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ=="],
|
||||
|
||||
"@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.21.5", "", { "os": "darwin", "cpu": "x64" }, "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw=="],
|
||||
|
||||
"@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.21.5", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g=="],
|
||||
|
||||
"@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.21.5", "", { "os": "freebsd", "cpu": "x64" }, "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ=="],
|
||||
|
||||
"@esbuild/linux-arm": ["@esbuild/linux-arm@0.21.5", "", { "os": "linux", "cpu": "arm" }, "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA=="],
|
||||
|
||||
"@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.21.5", "", { "os": "linux", "cpu": "arm64" }, "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q=="],
|
||||
|
||||
"@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.21.5", "", { "os": "linux", "cpu": "ia32" }, "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg=="],
|
||||
|
||||
"@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.21.5", "", { "os": "linux", "cpu": "none" }, "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg=="],
|
||||
|
||||
"@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.21.5", "", { "os": "linux", "cpu": "none" }, "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg=="],
|
||||
|
||||
"@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.21.5", "", { "os": "linux", "cpu": "ppc64" }, "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w=="],
|
||||
|
||||
"@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.21.5", "", { "os": "linux", "cpu": "none" }, "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA=="],
|
||||
|
||||
"@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.21.5", "", { "os": "linux", "cpu": "s390x" }, "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A=="],
|
||||
|
||||
"@esbuild/linux-x64": ["@esbuild/linux-x64@0.21.5", "", { "os": "linux", "cpu": "x64" }, "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ=="],
|
||||
|
||||
"@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.21.5", "", { "os": "none", "cpu": "x64" }, "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg=="],
|
||||
|
||||
"@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.21.5", "", { "os": "openbsd", "cpu": "x64" }, "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow=="],
|
||||
|
||||
"@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.21.5", "", { "os": "sunos", "cpu": "x64" }, "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg=="],
|
||||
|
||||
"@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.21.5", "", { "os": "win32", "cpu": "arm64" }, "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A=="],
|
||||
|
||||
"@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.21.5", "", { "os": "win32", "cpu": "ia32" }, "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA=="],
|
||||
|
||||
"@esbuild/win32-x64": ["@esbuild/win32-x64@0.21.5", "", { "os": "win32", "cpu": "x64" }, "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw=="],
|
||||
|
||||
"@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.13", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA=="],
|
||||
|
||||
"@jridgewell/remapping": ["@jridgewell/remapping@2.3.5", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ=="],
|
||||
|
||||
"@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.2", "", {}, "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="],
|
||||
|
||||
"@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.5", "", {}, "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og=="],
|
||||
|
||||
"@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.31", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw=="],
|
||||
|
||||
"@rc-component/async-validator": ["@rc-component/async-validator@5.1.0", "", { "dependencies": { "@babel/runtime": "^7.24.4" } }, "sha512-n4HcR5siNUXRX23nDizbZBQPO0ZM/5oTtmKZ6/eqL0L2bo747cklFdZGRN2f+c9qWGICwDzrhW0H7tE9PptdcA=="],
|
||||
|
||||
"@rc-component/color-picker": ["@rc-component/color-picker@2.0.1", "", { "dependencies": { "@ant-design/fast-color": "^2.0.6", "@babel/runtime": "^7.23.6", "classnames": "^2.2.6", "rc-util": "^5.38.1" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-WcZYwAThV/b2GISQ8F+7650r5ZZJ043E57aVBFkQ+kSY4C6wdofXgB0hBx+GPGpIU0Z81eETNoDUJMr7oy/P8Q=="],
|
||||
|
||||
"@rc-component/context": ["@rc-component/context@1.4.0", "", { "dependencies": { "@babel/runtime": "^7.10.1", "rc-util": "^5.27.0" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-kFcNxg9oLRMoL3qki0OMxK+7g5mypjgaaJp/pkOis/6rVxma9nJBF/8kCIuTYHUQNr0ii7MxqE33wirPZLJQ2w=="],
|
||||
|
||||
"@rc-component/mini-decimal": ["@rc-component/mini-decimal@1.1.0", "", { "dependencies": { "@babel/runtime": "^7.18.0" } }, "sha512-jS4E7T9Li2GuYwI6PyiVXmxTiM6b07rlD9Ge8uGZSCz3WlzcG5ZK7g5bbuKNeZ9pgUuPK/5guV781ujdVpm4HQ=="],
|
||||
|
||||
"@rc-component/mutate-observer": ["@rc-component/mutate-observer@1.1.0", "", { "dependencies": { "@babel/runtime": "^7.18.0", "classnames": "^2.3.2", "rc-util": "^5.24.4" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-QjrOsDXQusNwGZPf4/qRQasg7UFEj06XiCJ8iuiq/Io7CrHrgVi6Uuetw60WAMG1799v+aM8kyc+1L/GBbHSlw=="],
|
||||
|
||||
"@rc-component/portal": ["@rc-component/portal@1.1.2", "", { "dependencies": { "@babel/runtime": "^7.18.0", "classnames": "^2.3.2", "rc-util": "^5.24.4" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-6f813C0IsasTZms08kfA8kPAGxbbkYToa8ALaiDIGGECU4i9hj8Plgbx0sNJDrey3EtHO30hmdaxtT0138xZcg=="],
|
||||
|
||||
"@rc-component/qrcode": ["@rc-component/qrcode@1.1.1", "", { "dependencies": { "@babel/runtime": "^7.24.7" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-LfLGNymzKdUPjXUbRP+xOhIWY4jQ+YMj5MmWAcgcAq1Ij8XP7tRmAXqyuv96XvLUBE/5cA8hLFl9eO1JQMujrA=="],
|
||||
|
||||
"@rc-component/tour": ["@rc-component/tour@1.15.1", "", { "dependencies": { "@babel/runtime": "^7.18.0", "@rc-component/portal": "^1.0.0-9", "@rc-component/trigger": "^2.0.0", "classnames": "^2.3.2", "rc-util": "^5.24.4" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-Tr2t7J1DKZUpfJuDZWHxyxWpfmj8EZrqSgyMZ+BCdvKZ6r1UDsfU46M/iWAAFBy961Ssfom2kv5f3UcjIL2CmQ=="],
|
||||
|
||||
"@rc-component/trigger": ["@rc-component/trigger@2.3.1", "", { "dependencies": { "@babel/runtime": "^7.23.2", "@rc-component/portal": "^1.1.0", "classnames": "^2.3.2", "rc-motion": "^2.0.0", "rc-resize-observer": "^1.3.1", "rc-util": "^5.44.0" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-ORENF39PeXTzM+gQEshuk460Z8N4+6DkjpxlpE7Q3gYy1iBpLrx0FOJz3h62ryrJZ/3zCAUIkT1Pb/8hHWpb3A=="],
|
||||
|
||||
"@remix-run/router": ["@remix-run/router@1.23.2", "", {}, "sha512-Ic6m2U/rMjTkhERIa/0ZtXJP17QUi2CbWE7cqx4J58M8aA3QTfW+2UlQ4psvTX9IO1RfNVhK3pcpdjej7L+t2w=="],
|
||||
|
||||
"@rolldown/pluginutils": ["@rolldown/pluginutils@1.0.0-beta.27", "", {}, "sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA=="],
|
||||
|
||||
"@rollup/rollup-android-arm-eabi": ["@rollup/rollup-android-arm-eabi@4.57.1", "", { "os": "android", "cpu": "arm" }, "sha512-A6ehUVSiSaaliTxai040ZpZ2zTevHYbvu/lDoeAteHI8QnaosIzm4qwtezfRg1jOYaUmnzLX1AOD6Z+UJjtifg=="],
|
||||
|
||||
"@rollup/rollup-android-arm64": ["@rollup/rollup-android-arm64@4.57.1", "", { "os": "android", "cpu": "arm64" }, "sha512-dQaAddCY9YgkFHZcFNS/606Exo8vcLHwArFZ7vxXq4rigo2bb494/xKMMwRRQW6ug7Js6yXmBZhSBRuBvCCQ3w=="],
|
||||
|
||||
"@rollup/rollup-darwin-arm64": ["@rollup/rollup-darwin-arm64@4.57.1", "", { "os": "darwin", "cpu": "arm64" }, "sha512-crNPrwJOrRxagUYeMn/DZwqN88SDmwaJ8Cvi/TN1HnWBU7GwknckyosC2gd0IqYRsHDEnXf328o9/HC6OkPgOg=="],
|
||||
|
||||
"@rollup/rollup-darwin-x64": ["@rollup/rollup-darwin-x64@4.57.1", "", { "os": "darwin", "cpu": "x64" }, "sha512-Ji8g8ChVbKrhFtig5QBV7iMaJrGtpHelkB3lsaKzadFBe58gmjfGXAOfI5FV0lYMH8wiqsxKQ1C9B0YTRXVy4w=="],
|
||||
|
||||
"@rollup/rollup-freebsd-arm64": ["@rollup/rollup-freebsd-arm64@4.57.1", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-R+/WwhsjmwodAcz65guCGFRkMb4gKWTcIeLy60JJQbXrJ97BOXHxnkPFrP+YwFlaS0m+uWJTstrUA9o+UchFug=="],
|
||||
|
||||
"@rollup/rollup-freebsd-x64": ["@rollup/rollup-freebsd-x64@4.57.1", "", { "os": "freebsd", "cpu": "x64" }, "sha512-IEQTCHeiTOnAUC3IDQdzRAGj3jOAYNr9kBguI7MQAAZK3caezRrg0GxAb6Hchg4lxdZEI5Oq3iov/w/hnFWY9Q=="],
|
||||
|
||||
"@rollup/rollup-linux-arm-gnueabihf": ["@rollup/rollup-linux-arm-gnueabihf@4.57.1", "", { "os": "linux", "cpu": "arm" }, "sha512-F8sWbhZ7tyuEfsmOxwc2giKDQzN3+kuBLPwwZGyVkLlKGdV1nvnNwYD0fKQ8+XS6hp9nY7B+ZeK01EBUE7aHaw=="],
|
||||
|
||||
"@rollup/rollup-linux-arm-musleabihf": ["@rollup/rollup-linux-arm-musleabihf@4.57.1", "", { "os": "linux", "cpu": "arm" }, "sha512-rGfNUfn0GIeXtBP1wL5MnzSj98+PZe/AXaGBCRmT0ts80lU5CATYGxXukeTX39XBKsxzFpEeK+Mrp9faXOlmrw=="],
|
||||
|
||||
"@rollup/rollup-linux-arm64-gnu": ["@rollup/rollup-linux-arm64-gnu@4.57.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-MMtej3YHWeg/0klK2Qodf3yrNzz6CGjo2UntLvk2RSPlhzgLvYEB3frRvbEF2wRKh1Z2fDIg9KRPe1fawv7C+g=="],
|
||||
|
||||
"@rollup/rollup-linux-arm64-musl": ["@rollup/rollup-linux-arm64-musl@4.57.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-1a/qhaaOXhqXGpMFMET9VqwZakkljWHLmZOX48R0I/YLbhdxr1m4gtG1Hq7++VhVUmf+L3sTAf9op4JlhQ5u1Q=="],
|
||||
|
||||
"@rollup/rollup-linux-loong64-gnu": ["@rollup/rollup-linux-loong64-gnu@4.57.1", "", { "os": "linux", "cpu": "none" }, "sha512-QWO6RQTZ/cqYtJMtxhkRkidoNGXc7ERPbZN7dVW5SdURuLeVU7lwKMpo18XdcmpWYd0qsP1bwKPf7DNSUinhvA=="],
|
||||
|
||||
"@rollup/rollup-linux-loong64-musl": ["@rollup/rollup-linux-loong64-musl@4.57.1", "", { "os": "linux", "cpu": "none" }, "sha512-xpObYIf+8gprgWaPP32xiN5RVTi/s5FCR+XMXSKmhfoJjrpRAjCuuqQXyxUa/eJTdAE6eJ+KDKaoEqjZQxh3Gw=="],
|
||||
|
||||
"@rollup/rollup-linux-ppc64-gnu": ["@rollup/rollup-linux-ppc64-gnu@4.57.1", "", { "os": "linux", "cpu": "ppc64" }, "sha512-4BrCgrpZo4hvzMDKRqEaW1zeecScDCR+2nZ86ATLhAoJ5FQ+lbHVD3ttKe74/c7tNT9c6F2viwB3ufwp01Oh2w=="],
|
||||
|
||||
"@rollup/rollup-linux-ppc64-musl": ["@rollup/rollup-linux-ppc64-musl@4.57.1", "", { "os": "linux", "cpu": "ppc64" }, "sha512-NOlUuzesGauESAyEYFSe3QTUguL+lvrN1HtwEEsU2rOwdUDeTMJdO5dUYl/2hKf9jWydJrO9OL/XSSf65R5+Xw=="],
|
||||
|
||||
"@rollup/rollup-linux-riscv64-gnu": ["@rollup/rollup-linux-riscv64-gnu@4.57.1", "", { "os": "linux", "cpu": "none" }, "sha512-ptA88htVp0AwUUqhVghwDIKlvJMD/fmL/wrQj99PRHFRAG6Z5nbWoWG4o81Nt9FT+IuqUQi+L31ZKAFeJ5Is+A=="],
|
||||
|
||||
"@rollup/rollup-linux-riscv64-musl": ["@rollup/rollup-linux-riscv64-musl@4.57.1", "", { "os": "linux", "cpu": "none" }, "sha512-S51t7aMMTNdmAMPpBg7OOsTdn4tySRQvklmL3RpDRyknk87+Sp3xaumlatU+ppQ+5raY7sSTcC2beGgvhENfuw=="],
|
||||
|
||||
"@rollup/rollup-linux-s390x-gnu": ["@rollup/rollup-linux-s390x-gnu@4.57.1", "", { "os": "linux", "cpu": "s390x" }, "sha512-Bl00OFnVFkL82FHbEqy3k5CUCKH6OEJL54KCyx2oqsmZnFTR8IoNqBF+mjQVcRCT5sB6yOvK8A37LNm/kPJiZg=="],
|
||||
|
||||
"@rollup/rollup-linux-x64-gnu": ["@rollup/rollup-linux-x64-gnu@4.57.1", "", { "os": "linux", "cpu": "x64" }, "sha512-ABca4ceT4N+Tv/GtotnWAeXZUZuM/9AQyCyKYyKnpk4yoA7QIAuBt6Hkgpw8kActYlew2mvckXkvx0FfoInnLg=="],
|
||||
|
||||
"@rollup/rollup-linux-x64-musl": ["@rollup/rollup-linux-x64-musl@4.57.1", "", { "os": "linux", "cpu": "x64" }, "sha512-HFps0JeGtuOR2convgRRkHCekD7j+gdAuXM+/i6kGzQtFhlCtQkpwtNzkNj6QhCDp7DRJ7+qC/1Vg2jt5iSOFw=="],
|
||||
|
||||
"@rollup/rollup-openbsd-x64": ["@rollup/rollup-openbsd-x64@4.57.1", "", { "os": "openbsd", "cpu": "x64" }, "sha512-H+hXEv9gdVQuDTgnqD+SQffoWoc0Of59AStSzTEj/feWTBAnSfSD3+Dql1ZruJQxmykT/JVY0dE8Ka7z0DH1hw=="],
|
||||
|
||||
"@rollup/rollup-openharmony-arm64": ["@rollup/rollup-openharmony-arm64@4.57.1", "", { "os": "none", "cpu": "arm64" }, "sha512-4wYoDpNg6o/oPximyc/NG+mYUejZrCU2q+2w6YZqrAs2UcNUChIZXjtafAiiZSUc7On8v5NyNj34Kzj/Ltk6dQ=="],
|
||||
|
||||
"@rollup/rollup-win32-arm64-msvc": ["@rollup/rollup-win32-arm64-msvc@4.57.1", "", { "os": "win32", "cpu": "arm64" }, "sha512-O54mtsV/6LW3P8qdTcamQmuC990HDfR71lo44oZMZlXU4tzLrbvTii87Ni9opq60ds0YzuAlEr/GNwuNluZyMQ=="],
|
||||
|
||||
"@rollup/rollup-win32-ia32-msvc": ["@rollup/rollup-win32-ia32-msvc@4.57.1", "", { "os": "win32", "cpu": "ia32" }, "sha512-P3dLS+IerxCT/7D2q2FYcRdWRl22dNbrbBEtxdWhXrfIMPP9lQhb5h4Du04mdl5Woq05jVCDPCMF7Ub0NAjIew=="],
|
||||
|
||||
"@rollup/rollup-win32-x64-gnu": ["@rollup/rollup-win32-x64-gnu@4.57.1", "", { "os": "win32", "cpu": "x64" }, "sha512-VMBH2eOOaKGtIJYleXsi2B8CPVADrh+TyNxJ4mWPnKfLB/DBUmzW+5m1xUrcwWoMfSLagIRpjUFeW5CO5hyciQ=="],
|
||||
|
||||
"@rollup/rollup-win32-x64-msvc": ["@rollup/rollup-win32-x64-msvc@4.57.1", "", { "os": "win32", "cpu": "x64" }, "sha512-mxRFDdHIWRxg3UfIIAwCm6NzvxG0jDX/wBN6KsQFTvKFqqg9vTrWUE68qEjHt19A5wwx5X5aUi2zuZT7YR0jrA=="],
|
||||
|
||||
"@socket.io/component-emitter": ["@socket.io/component-emitter@3.1.2", "", {}, "sha512-9BCxFwvbGg/RsZK9tjXd8s4UcwR0MWeFQ1XEKIQVVvAGJyINdrqKMcTRyLoK8Rse1GjzLV9cwjWV1olXRWEXVA=="],
|
||||
|
||||
"@types/babel__core": ["@types/babel__core@7.20.5", "", { "dependencies": { "@babel/parser": "^7.20.7", "@babel/types": "^7.20.7", "@types/babel__generator": "*", "@types/babel__template": "*", "@types/babel__traverse": "*" } }, "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA=="],
|
||||
|
||||
"@types/babel__generator": ["@types/babel__generator@7.27.0", "", { "dependencies": { "@babel/types": "^7.0.0" } }, "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg=="],
|
||||
|
||||
"@types/babel__template": ["@types/babel__template@7.4.4", "", { "dependencies": { "@babel/parser": "^7.1.0", "@babel/types": "^7.0.0" } }, "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A=="],
|
||||
|
||||
"@types/babel__traverse": ["@types/babel__traverse@7.28.0", "", { "dependencies": { "@babel/types": "^7.28.2" } }, "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q=="],
|
||||
|
||||
"@types/estree": ["@types/estree@1.0.8", "", {}, "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w=="],
|
||||
|
||||
"@types/prop-types": ["@types/prop-types@15.7.15", "", {}, "sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw=="],
|
||||
|
||||
"@types/react": ["@types/react@18.3.27", "", { "dependencies": { "@types/prop-types": "*", "csstype": "^3.2.2" } }, "sha512-cisd7gxkzjBKU2GgdYrTdtQx1SORymWyaAFhaxQPK9bYO9ot3Y5OikQRvY0VYQtvwjeQnizCINJAenh/V7MK2w=="],
|
||||
|
||||
"@types/react-dom": ["@types/react-dom@18.3.7", "", { "peerDependencies": { "@types/react": "^18.0.0" } }, "sha512-MEe3UeoENYVFXzoXEWsvcpg6ZvlrFNlOQ7EOsvhI3CfAXwzPfO8Qwuxd40nepsYKqyyVQnTdEfv68q91yLcKrQ=="],
|
||||
|
||||
"@vitejs/plugin-react": ["@vitejs/plugin-react@4.7.0", "", { "dependencies": { "@babel/core": "^7.28.0", "@babel/plugin-transform-react-jsx-self": "^7.27.1", "@babel/plugin-transform-react-jsx-source": "^7.27.1", "@rolldown/pluginutils": "1.0.0-beta.27", "@types/babel__core": "^7.20.5", "react-refresh": "^0.17.0" }, "peerDependencies": { "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" } }, "sha512-gUu9hwfWvvEDBBmgtAowQCojwZmJ5mcLn3aufeCsitijs3+f2NsrPtlAWIR6OPiqljl96GVCUbLe0HyqIpVaoA=="],
|
||||
|
||||
"antd": ["antd@5.29.3", "", { "dependencies": { "@ant-design/colors": "^7.2.1", "@ant-design/cssinjs": "^1.23.0", "@ant-design/cssinjs-utils": "^1.1.3", "@ant-design/fast-color": "^2.0.6", "@ant-design/icons": "^5.6.1", "@ant-design/react-slick": "~1.1.2", "@babel/runtime": "^7.26.0", "@rc-component/color-picker": "~2.0.1", "@rc-component/mutate-observer": "^1.1.0", "@rc-component/qrcode": "~1.1.0", "@rc-component/tour": "~1.15.1", "@rc-component/trigger": "^2.3.0", "classnames": "^2.5.1", "copy-to-clipboard": "^3.3.3", "dayjs": "^1.11.11", "rc-cascader": "~3.34.0", "rc-checkbox": "~3.5.0", "rc-collapse": "~3.9.0", "rc-dialog": "~9.6.0", "rc-drawer": "~7.3.0", "rc-dropdown": "~4.2.1", "rc-field-form": "~2.7.1", "rc-image": "~7.12.0", "rc-input": "~1.8.0", "rc-input-number": "~9.5.0", "rc-mentions": "~2.20.0", "rc-menu": "~9.16.1", "rc-motion": "^2.9.5", "rc-notification": "~5.6.4", "rc-pagination": "~5.1.0", "rc-picker": "~4.11.3", "rc-progress": "~4.0.0", "rc-rate": "~2.13.1", "rc-resize-observer": "^1.4.3", "rc-segmented": "~2.7.0", "rc-select": "~14.16.8", "rc-slider": "~11.1.9", "rc-steps": "~6.0.1", "rc-switch": "~4.1.0", "rc-table": "~7.54.0", "rc-tabs": "~15.7.0", "rc-textarea": "~1.10.2", "rc-tooltip": "~6.4.0", "rc-tree": "~5.13.1", "rc-tree-select": "~5.27.0", "rc-upload": "~4.11.0", "rc-util": "^5.44.4", "scroll-into-view-if-needed": "^3.1.0", "throttle-debounce": "^5.0.2" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-3DdbGCa9tWAJGcCJ6rzR8EJFsv2CtyEbkVabZE14pfgUHfCicWCj0/QzQVLDYg8CPfQk9BH7fHCoTXHTy7MP/A=="],
|
||||
|
||||
"asynckit": ["asynckit@0.4.0", "", {}, "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="],
|
||||
|
||||
"axios": ["axios@1.13.4", "", { "dependencies": { "follow-redirects": "^1.15.6", "form-data": "^4.0.4", "proxy-from-env": "^1.1.0" } }, "sha512-1wVkUaAO6WyaYtCkcYCOx12ZgpGf9Zif+qXa4n+oYzK558YryKqiL6UWwd5DqiH3VRW0GYhTZQ/vlgJrCoNQlg=="],
|
||||
|
||||
"baseline-browser-mapping": ["baseline-browser-mapping@2.9.19", "", { "bin": "dist/cli.js" }, "sha512-ipDqC8FrAl/76p2SSWKSI+H9tFwm7vYqXQrItCuiVPt26Km0jS+NzSsBWAaBusvSbQcfJG+JitdMm+wZAgTYqg=="],
|
||||
|
||||
"browserslist": ["browserslist@4.28.1", "", { "dependencies": { "baseline-browser-mapping": "^2.9.0", "caniuse-lite": "^1.0.30001759", "electron-to-chromium": "^1.5.263", "node-releases": "^2.0.27", "update-browserslist-db": "^1.2.0" }, "bin": "cli.js" }, "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA=="],
|
||||
|
||||
"call-bind-apply-helpers": ["call-bind-apply-helpers@1.0.2", "", { "dependencies": { "es-errors": "^1.3.0", "function-bind": "^1.1.2" } }, "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ=="],
|
||||
|
||||
"caniuse-lite": ["caniuse-lite@1.0.30001767", "", {}, "sha512-34+zUAMhSH+r+9eKmYG+k2Rpt8XttfE4yXAjoZvkAPs15xcYQhyBYdalJ65BzivAvGRMViEjy6oKr/S91loekQ=="],
|
||||
|
||||
"classnames": ["classnames@2.5.1", "", {}, "sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow=="],
|
||||
|
||||
"clsx": ["clsx@2.1.1", "", {}, "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA=="],
|
||||
|
||||
"combined-stream": ["combined-stream@1.0.8", "", { "dependencies": { "delayed-stream": "~1.0.0" } }, "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg=="],
|
||||
|
||||
"compute-scroll-into-view": ["compute-scroll-into-view@3.1.1", "", {}, "sha512-VRhuHOLoKYOy4UbilLbUzbYg93XLjv2PncJC50EuTWPA3gaja1UjBsUP/D/9/juV3vQFr6XBEzn9KCAHdUvOHw=="],
|
||||
|
||||
"convert-source-map": ["convert-source-map@2.0.0", "", {}, "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg=="],
|
||||
|
||||
"copy-to-clipboard": ["copy-to-clipboard@3.3.3", "", { "dependencies": { "toggle-selection": "^1.0.6" } }, "sha512-2KV8NhB5JqC3ky0r9PMCAZKbUHSwtEo4CwCs0KXgruG43gX5PMqDEBbVU4OUzw2MuAWUfsuFmWvEKG5QRfSnJA=="],
|
||||
|
||||
"csstype": ["csstype@3.2.3", "", {}, "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ=="],
|
||||
|
||||
"dayjs": ["dayjs@1.11.19", "", {}, "sha512-t5EcLVS6QPBNqM2z8fakk/NKel+Xzshgt8FFKAn+qwlD1pzZWxh0nVCrvFK7ZDb6XucZeF9z8C7CBWTRIVApAw=="],
|
||||
|
||||
"debug": ["debug@4.4.3", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA=="],
|
||||
|
||||
"delayed-stream": ["delayed-stream@1.0.0", "", {}, "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="],
|
||||
|
||||
"dunder-proto": ["dunder-proto@1.0.1", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "es-errors": "^1.3.0", "gopd": "^1.2.0" } }, "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A=="],
|
||||
|
||||
"electron-to-chromium": ["electron-to-chromium@1.5.286", "", {}, "sha512-9tfDXhJ4RKFNerfjdCcZfufu49vg620741MNs26a9+bhLThdB+plgMeou98CAaHu/WATj2iHOOHTp1hWtABj2A=="],
|
||||
|
||||
"engine.io-client": ["engine.io-client@6.6.4", "", { "dependencies": { "@socket.io/component-emitter": "~3.1.0", "debug": "~4.4.1", "engine.io-parser": "~5.2.1", "ws": "~8.18.3", "xmlhttprequest-ssl": "~2.1.1" } }, "sha512-+kjUJnZGwzewFDw951CDWcwj35vMNf2fcj7xQWOctq1F2i1jkDdVvdFG9kM/BEChymCH36KgjnW0NsL58JYRxw=="],
|
||||
|
||||
"engine.io-parser": ["engine.io-parser@5.2.3", "", {}, "sha512-HqD3yTBfnBxIrbnM1DoD6Pcq8NECnh8d4As1Qgh0z5Gg3jRRIqijury0CL3ghu/edArpUYiYqQiDUQBIs4np3Q=="],
|
||||
|
||||
"es-define-property": ["es-define-property@1.0.1", "", {}, "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g=="],
|
||||
|
||||
"es-errors": ["es-errors@1.3.0", "", {}, "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw=="],
|
||||
|
||||
"es-object-atoms": ["es-object-atoms@1.1.1", "", { "dependencies": { "es-errors": "^1.3.0" } }, "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA=="],
|
||||
|
||||
"es-set-tostringtag": ["es-set-tostringtag@2.1.0", "", { "dependencies": { "es-errors": "^1.3.0", "get-intrinsic": "^1.2.6", "has-tostringtag": "^1.0.2", "hasown": "^2.0.2" } }, "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA=="],
|
||||
|
||||
"esbuild": ["esbuild@0.21.5", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.21.5", "@esbuild/android-arm": "0.21.5", "@esbuild/android-arm64": "0.21.5", "@esbuild/android-x64": "0.21.5", "@esbuild/darwin-arm64": "0.21.5", "@esbuild/darwin-x64": "0.21.5", "@esbuild/freebsd-arm64": "0.21.5", "@esbuild/freebsd-x64": "0.21.5", "@esbuild/linux-arm": "0.21.5", "@esbuild/linux-arm64": "0.21.5", "@esbuild/linux-ia32": "0.21.5", "@esbuild/linux-loong64": "0.21.5", "@esbuild/linux-mips64el": "0.21.5", "@esbuild/linux-ppc64": "0.21.5", "@esbuild/linux-riscv64": "0.21.5", "@esbuild/linux-s390x": "0.21.5", "@esbuild/linux-x64": "0.21.5", "@esbuild/netbsd-x64": "0.21.5", "@esbuild/openbsd-x64": "0.21.5", "@esbuild/sunos-x64": "0.21.5", "@esbuild/win32-arm64": "0.21.5", "@esbuild/win32-ia32": "0.21.5", "@esbuild/win32-x64": "0.21.5" }, "bin": "bin/esbuild" }, "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw=="],
|
||||
|
||||
"escalade": ["escalade@3.2.0", "", {}, "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA=="],
|
||||
|
||||
"follow-redirects": ["follow-redirects@1.15.11", "", {}, "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ=="],
|
||||
|
||||
"form-data": ["form-data@4.0.5", "", { "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", "es-set-tostringtag": "^2.1.0", "hasown": "^2.0.2", "mime-types": "^2.1.12" } }, "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w=="],
|
||||
|
||||
"fsevents": ["fsevents@2.3.3", "", { "os": "darwin" }, "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="],
|
||||
|
||||
"function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="],
|
||||
|
||||
"gensync": ["gensync@1.0.0-beta.2", "", {}, "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg=="],
|
||||
|
||||
"get-intrinsic": ["get-intrinsic@1.3.0", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.2", "es-define-property": "^1.0.1", "es-errors": "^1.3.0", "es-object-atoms": "^1.1.1", "function-bind": "^1.1.2", "get-proto": "^1.0.1", "gopd": "^1.2.0", "has-symbols": "^1.1.0", "hasown": "^2.0.2", "math-intrinsics": "^1.1.0" } }, "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ=="],
|
||||
|
||||
"get-proto": ["get-proto@1.0.1", "", { "dependencies": { "dunder-proto": "^1.0.1", "es-object-atoms": "^1.0.0" } }, "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g=="],
|
||||
|
||||
"gopd": ["gopd@1.2.0", "", {}, "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg=="],
|
||||
|
||||
"has-symbols": ["has-symbols@1.1.0", "", {}, "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ=="],
|
||||
|
||||
"has-tostringtag": ["has-tostringtag@1.0.2", "", { "dependencies": { "has-symbols": "^1.0.3" } }, "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw=="],
|
||||
|
||||
"hasown": ["hasown@2.0.2", "", { "dependencies": { "function-bind": "^1.1.2" } }, "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ=="],
|
||||
|
||||
"js-tokens": ["js-tokens@4.0.0", "", {}, "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="],
|
||||
|
||||
"jsesc": ["jsesc@3.1.0", "", { "bin": "bin/jsesc" }, "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA=="],
|
||||
|
||||
"json2mq": ["json2mq@0.2.0", "", { "dependencies": { "string-convert": "^0.2.0" } }, "sha512-SzoRg7ux5DWTII9J2qkrZrqV1gt+rTaoufMxEzXbS26Uid0NwaJd123HcoB80TgubEppxxIGdNxCx50fEoEWQA=="],
|
||||
|
||||
"json5": ["json5@2.2.3", "", { "bin": "lib/cli.js" }, "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg=="],
|
||||
|
||||
"loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": "cli.js" }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="],
|
||||
|
||||
"lru-cache": ["lru-cache@5.1.1", "", { "dependencies": { "yallist": "^3.0.2" } }, "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w=="],
|
||||
|
||||
"math-intrinsics": ["math-intrinsics@1.1.0", "", {}, "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="],
|
||||
|
||||
"mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="],
|
||||
|
||||
"mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="],
|
||||
|
||||
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
|
||||
|
||||
"nanoid": ["nanoid@3.3.11", "", { "bin": "bin/nanoid.cjs" }, "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="],
|
||||
|
||||
"node-releases": ["node-releases@2.0.27", "", {}, "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA=="],
|
||||
|
||||
"object-assign": ["object-assign@4.1.1", "", {}, "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="],
|
||||
|
||||
"picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="],
|
||||
|
||||
"postcss": ["postcss@8.5.6", "", { "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" } }, "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg=="],
|
||||
|
||||
"prop-types": ["prop-types@15.8.1", "", { "dependencies": { "loose-envify": "^1.4.0", "object-assign": "^4.1.1", "react-is": "^16.13.1" } }, "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg=="],
|
||||
|
||||
"proxy-from-env": ["proxy-from-env@1.1.0", "", {}, "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="],
|
||||
|
||||
"rc-cascader": ["rc-cascader@3.34.0", "", { "dependencies": { "@babel/runtime": "^7.25.7", "classnames": "^2.3.1", "rc-select": "~14.16.2", "rc-tree": "~5.13.0", "rc-util": "^5.43.0" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-KpXypcvju9ptjW9FaN2NFcA2QH9E9LHKq169Y0eWtH4e/wHQ5Wh5qZakAgvb8EKZ736WZ3B0zLLOBsrsja5Dag=="],
|
||||
|
||||
"rc-checkbox": ["rc-checkbox@3.5.0", "", { "dependencies": { "@babel/runtime": "^7.10.1", "classnames": "^2.3.2", "rc-util": "^5.25.2" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-aOAQc3E98HteIIsSqm6Xk2FPKIER6+5vyEFMZfo73TqM+VVAIqOkHoPjgKLqSNtVLWScoaM7vY2ZrGEheI79yg=="],
|
||||
|
||||
"rc-collapse": ["rc-collapse@3.9.0", "", { "dependencies": { "@babel/runtime": "^7.10.1", "classnames": "2.x", "rc-motion": "^2.3.4", "rc-util": "^5.27.0" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-swDdz4QZ4dFTo4RAUMLL50qP0EY62N2kvmk2We5xYdRwcRn8WcYtuetCJpwpaCbUfUt5+huLpVxhvmnK+PHrkA=="],
|
||||
|
||||
"rc-dialog": ["rc-dialog@9.6.0", "", { "dependencies": { "@babel/runtime": "^7.10.1", "@rc-component/portal": "^1.0.0-8", "classnames": "^2.2.6", "rc-motion": "^2.3.0", "rc-util": "^5.21.0" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-ApoVi9Z8PaCQg6FsUzS8yvBEQy0ZL2PkuvAgrmohPkN3okps5WZ5WQWPc1RNuiOKaAYv8B97ACdsFU5LizzCqg=="],
|
||||
|
||||
"rc-drawer": ["rc-drawer@7.3.0", "", { "dependencies": { "@babel/runtime": "^7.23.9", "@rc-component/portal": "^1.1.1", "classnames": "^2.2.6", "rc-motion": "^2.6.1", "rc-util": "^5.38.1" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-DX6CIgiBWNpJIMGFO8BAISFkxiuKitoizooj4BDyee8/SnBn0zwO2FHrNDpqqepj0E/TFTDpmEBCyFuTgC7MOg=="],
|
||||
|
||||
"rc-dropdown": ["rc-dropdown@4.2.1", "", { "dependencies": { "@babel/runtime": "^7.18.3", "@rc-component/trigger": "^2.0.0", "classnames": "^2.2.6", "rc-util": "^5.44.1" }, "peerDependencies": { "react": ">=16.11.0", "react-dom": ">=16.11.0" } }, "sha512-YDAlXsPv3I1n42dv1JpdM7wJ+gSUBfeyPK59ZpBD9jQhK9jVuxpjj3NmWQHOBceA1zEPVX84T2wbdb2SD0UjmA=="],
|
||||
|
||||
"rc-field-form": ["rc-field-form@2.7.1", "", { "dependencies": { "@babel/runtime": "^7.18.0", "@rc-component/async-validator": "^5.0.3", "rc-util": "^5.32.2" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-vKeSifSJ6HoLaAB+B8aq/Qgm8a3dyxROzCtKNCsBQgiverpc4kWDQihoUwzUj+zNWJOykwSY4dNX3QrGwtVb9A=="],
|
||||
|
||||
"rc-image": ["rc-image@7.12.0", "", { "dependencies": { "@babel/runtime": "^7.11.2", "@rc-component/portal": "^1.0.2", "classnames": "^2.2.6", "rc-dialog": "~9.6.0", "rc-motion": "^2.6.2", "rc-util": "^5.34.1" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-cZ3HTyyckPnNnUb9/DRqduqzLfrQRyi+CdHjdqgsyDpI3Ln5UX1kXnAhPBSJj9pVRzwRFgqkN7p9b6HBDjmu/Q=="],
|
||||
|
||||
"rc-input": ["rc-input@1.8.0", "", { "dependencies": { "@babel/runtime": "^7.11.1", "classnames": "^2.2.1", "rc-util": "^5.18.1" }, "peerDependencies": { "react": ">=16.0.0", "react-dom": ">=16.0.0" } }, "sha512-KXvaTbX+7ha8a/k+eg6SYRVERK0NddX8QX7a7AnRvUa/rEH0CNMlpcBzBkhI0wp2C8C4HlMoYl8TImSN+fuHKA=="],
|
||||
|
||||
"rc-input-number": ["rc-input-number@9.5.0", "", { "dependencies": { "@babel/runtime": "^7.10.1", "@rc-component/mini-decimal": "^1.0.1", "classnames": "^2.2.5", "rc-input": "~1.8.0", "rc-util": "^5.40.1" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-bKaEvB5tHebUURAEXw35LDcnRZLq3x1k7GxfAqBMzmpHkDGzjAtnUL8y4y5N15rIFIg5IJgwr211jInl3cipag=="],
|
||||
|
||||
"rc-mentions": ["rc-mentions@2.20.0", "", { "dependencies": { "@babel/runtime": "^7.22.5", "@rc-component/trigger": "^2.0.0", "classnames": "^2.2.6", "rc-input": "~1.8.0", "rc-menu": "~9.16.0", "rc-textarea": "~1.10.0", "rc-util": "^5.34.1" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-w8HCMZEh3f0nR8ZEd466ATqmXFCMGMN5UFCzEUL0bM/nGw/wOS2GgRzKBcm19K++jDyuWCOJOdgcKGXU3fXfbQ=="],
|
||||
|
||||
"rc-menu": ["rc-menu@9.16.1", "", { "dependencies": { "@babel/runtime": "^7.10.1", "@rc-component/trigger": "^2.0.0", "classnames": "2.x", "rc-motion": "^2.4.3", "rc-overflow": "^1.3.1", "rc-util": "^5.27.0" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-ghHx6/6Dvp+fw8CJhDUHFHDJ84hJE3BXNCzSgLdmNiFErWSOaZNsihDAsKq9ByTALo/xkNIwtDFGIl6r+RPXBg=="],
|
||||
|
||||
"rc-motion": ["rc-motion@2.9.5", "", { "dependencies": { "@babel/runtime": "^7.11.1", "classnames": "^2.2.1", "rc-util": "^5.44.0" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-w+XTUrfh7ArbYEd2582uDrEhmBHwK1ZENJiSJVb7uRxdE7qJSYjbO2eksRXmndqyKqKoYPc9ClpPh5242mV1vA=="],
|
||||
|
||||
"rc-notification": ["rc-notification@5.6.4", "", { "dependencies": { "@babel/runtime": "^7.10.1", "classnames": "2.x", "rc-motion": "^2.9.0", "rc-util": "^5.20.1" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-KcS4O6B4qzM3KH7lkwOB7ooLPZ4b6J+VMmQgT51VZCeEcmghdeR4IrMcFq0LG+RPdnbe/ArT086tGM8Snimgiw=="],
|
||||
|
||||
"rc-overflow": ["rc-overflow@1.5.0", "", { "dependencies": { "@babel/runtime": "^7.11.1", "classnames": "^2.2.1", "rc-resize-observer": "^1.0.0", "rc-util": "^5.37.0" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-Lm/v9h0LymeUYJf0x39OveU52InkdRXqnn2aYXfWmo8WdOonIKB2kfau+GF0fWq6jPgtdO9yMqveGcK6aIhJmg=="],
|
||||
|
||||
"rc-pagination": ["rc-pagination@5.1.0", "", { "dependencies": { "@babel/runtime": "^7.10.1", "classnames": "^2.3.2", "rc-util": "^5.38.0" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-8416Yip/+eclTFdHXLKTxZvn70duYVGTvUUWbckCCZoIl3jagqke3GLsFrMs0bsQBikiYpZLD9206Ej4SOdOXQ=="],
|
||||
|
||||
"rc-picker": ["rc-picker@4.11.3", "", { "dependencies": { "@babel/runtime": "^7.24.7", "@rc-component/trigger": "^2.0.0", "classnames": "^2.2.1", "rc-overflow": "^1.3.2", "rc-resize-observer": "^1.4.0", "rc-util": "^5.43.0" }, "peerDependencies": { "date-fns": ">= 2.x", "dayjs": ">= 1.x", "luxon": ">= 3.x", "moment": ">= 2.x", "react": ">=16.9.0", "react-dom": ">=16.9.0" }, "optionalPeers": ["date-fns", "luxon", "moment"] }, "sha512-MJ5teb7FlNE0NFHTncxXQ62Y5lytq6sh5nUw0iH8OkHL/TjARSEvSHpr940pWgjGANpjCwyMdvsEV55l5tYNSg=="],
|
||||
|
||||
"rc-progress": ["rc-progress@4.0.0", "", { "dependencies": { "@babel/runtime": "^7.10.1", "classnames": "^2.2.6", "rc-util": "^5.16.1" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-oofVMMafOCokIUIBnZLNcOZFsABaUw8PPrf1/y0ZBvKZNpOiu5h4AO9vv11Sw0p4Hb3D0yGWuEattcQGtNJ/aw=="],
|
||||
|
||||
"rc-rate": ["rc-rate@2.13.1", "", { "dependencies": { "@babel/runtime": "^7.10.1", "classnames": "^2.2.5", "rc-util": "^5.0.1" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-QUhQ9ivQ8Gy7mtMZPAjLbxBt5y9GRp65VcUyGUMF3N3fhiftivPHdpuDIaWIMOTEprAjZPC08bls1dQB+I1F2Q=="],
|
||||
|
||||
"rc-resize-observer": ["rc-resize-observer@1.4.3", "", { "dependencies": { "@babel/runtime": "^7.20.7", "classnames": "^2.2.1", "rc-util": "^5.44.1", "resize-observer-polyfill": "^1.5.1" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-YZLjUbyIWox8E9i9C3Tm7ia+W7euPItNWSPX5sCcQTYbnwDb5uNpnLHQCG1f22oZWUhLw4Mv2tFmeWe68CDQRQ=="],
|
||||
|
||||
"rc-segmented": ["rc-segmented@2.7.1", "", { "dependencies": { "@babel/runtime": "^7.11.1", "classnames": "^2.2.1", "rc-motion": "^2.4.4", "rc-util": "^5.17.0" }, "peerDependencies": { "react": ">=16.0.0", "react-dom": ">=16.0.0" } }, "sha512-izj1Nw/Dw2Vb7EVr+D/E9lUTkBe+kKC+SAFSU9zqr7WV2W5Ktaa9Gc7cB2jTqgk8GROJayltaec+DBlYKc6d+g=="],
|
||||
|
||||
"rc-select": ["rc-select@14.16.8", "", { "dependencies": { "@babel/runtime": "^7.10.1", "@rc-component/trigger": "^2.1.1", "classnames": "2.x", "rc-motion": "^2.0.1", "rc-overflow": "^1.3.1", "rc-util": "^5.16.1", "rc-virtual-list": "^3.5.2" }, "peerDependencies": { "react": "*", "react-dom": "*" } }, "sha512-NOV5BZa1wZrsdkKaiK7LHRuo5ZjZYMDxPP6/1+09+FB4KoNi8jcG1ZqLE3AVCxEsYMBe65OBx71wFoHRTP3LRg=="],
|
||||
|
||||
"rc-slider": ["rc-slider@11.1.9", "", { "dependencies": { "@babel/runtime": "^7.10.1", "classnames": "^2.2.5", "rc-util": "^5.36.0" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-h8IknhzSh3FEM9u8ivkskh+Ef4Yo4JRIY2nj7MrH6GQmrwV6mcpJf5/4KgH5JaVI1H3E52yCdpOlVyGZIeph5A=="],
|
||||
|
||||
"rc-steps": ["rc-steps@6.0.1", "", { "dependencies": { "@babel/runtime": "^7.16.7", "classnames": "^2.2.3", "rc-util": "^5.16.1" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-lKHL+Sny0SeHkQKKDJlAjV5oZ8DwCdS2hFhAkIjuQt1/pB81M0cA0ErVFdHq9+jmPmFw1vJB2F5NBzFXLJxV+g=="],
|
||||
|
||||
"rc-switch": ["rc-switch@4.1.0", "", { "dependencies": { "@babel/runtime": "^7.21.0", "classnames": "^2.2.1", "rc-util": "^5.30.0" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-TI8ufP2Az9oEbvyCeVE4+90PDSljGyuwix3fV58p7HV2o4wBnVToEyomJRVyTaZeqNPAp+vqeo4Wnj5u0ZZQBg=="],
|
||||
|
||||
"rc-table": ["rc-table@7.54.0", "", { "dependencies": { "@babel/runtime": "^7.10.1", "@rc-component/context": "^1.4.0", "classnames": "^2.2.5", "rc-resize-observer": "^1.1.0", "rc-util": "^5.44.3", "rc-virtual-list": "^3.14.2" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-/wDTkki6wBTjwylwAGjpLKYklKo9YgjZwAU77+7ME5mBoS32Q4nAwoqhA2lSge6fobLW3Tap6uc5xfwaL2p0Sw=="],
|
||||
|
||||
"rc-tabs": ["rc-tabs@15.7.0", "", { "dependencies": { "@babel/runtime": "^7.11.2", "classnames": "2.x", "rc-dropdown": "~4.2.0", "rc-menu": "~9.16.0", "rc-motion": "^2.6.2", "rc-resize-observer": "^1.0.0", "rc-util": "^5.34.1" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-ZepiE+6fmozYdWf/9gVp7k56PKHB1YYoDsKeQA1CBlJ/POIhjkcYiv0AGP0w2Jhzftd3AVvZP/K+V+Lpi2ankA=="],
|
||||
|
||||
"rc-textarea": ["rc-textarea@1.10.2", "", { "dependencies": { "@babel/runtime": "^7.10.1", "classnames": "^2.2.1", "rc-input": "~1.8.0", "rc-resize-observer": "^1.0.0", "rc-util": "^5.27.0" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-HfaeXiaSlpiSp0I/pvWpecFEHpVysZ9tpDLNkxQbMvMz6gsr7aVZ7FpWP9kt4t7DB+jJXesYS0us1uPZnlRnwQ=="],
|
||||
|
||||
"rc-tooltip": ["rc-tooltip@6.4.0", "", { "dependencies": { "@babel/runtime": "^7.11.2", "@rc-component/trigger": "^2.0.0", "classnames": "^2.3.1", "rc-util": "^5.44.3" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-kqyivim5cp8I5RkHmpsp1Nn/Wk+1oeloMv9c7LXNgDxUpGm+RbXJGL+OPvDlcRnx9DBeOe4wyOIl4OKUERyH1g=="],
|
||||
|
||||
"rc-tree": ["rc-tree@5.13.1", "", { "dependencies": { "@babel/runtime": "^7.10.1", "classnames": "2.x", "rc-motion": "^2.0.1", "rc-util": "^5.16.1", "rc-virtual-list": "^3.5.1" }, "peerDependencies": { "react": "*", "react-dom": "*" } }, "sha512-FNhIefhftobCdUJshO7M8uZTA9F4OPGVXqGfZkkD/5soDeOhwO06T/aKTrg0WD8gRg/pyfq+ql3aMymLHCTC4A=="],
|
||||
|
||||
"rc-tree-select": ["rc-tree-select@5.27.0", "", { "dependencies": { "@babel/runtime": "^7.25.7", "classnames": "2.x", "rc-select": "~14.16.2", "rc-tree": "~5.13.0", "rc-util": "^5.43.0" }, "peerDependencies": { "react": "*", "react-dom": "*" } }, "sha512-2qTBTzwIT7LRI1o7zLyrCzmo5tQanmyGbSaGTIf7sYimCklAToVVfpMC6OAldSKolcnjorBYPNSKQqJmN3TCww=="],
|
||||
|
||||
"rc-upload": ["rc-upload@4.11.0", "", { "dependencies": { "@babel/runtime": "^7.18.3", "classnames": "^2.2.5", "rc-util": "^5.2.0" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-ZUyT//2JAehfHzjWowqROcwYJKnZkIUGWaTE/VogVrepSl7AFNbQf4+zGfX4zl9Vrj/Jm8scLO0R6UlPDKK4wA=="],
|
||||
|
||||
"rc-util": ["rc-util@5.44.4", "", { "dependencies": { "@babel/runtime": "^7.18.3", "react-is": "^18.2.0" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-resueRJzmHG9Q6rI/DfK6Kdv9/Lfls05vzMs1Sk3M2P+3cJa+MakaZyWY8IPfehVuhPJFKrIY1IK4GqbiaiY5w=="],
|
||||
|
||||
"rc-virtual-list": ["rc-virtual-list@3.19.2", "", { "dependencies": { "@babel/runtime": "^7.20.0", "classnames": "^2.2.6", "rc-resize-observer": "^1.0.0", "rc-util": "^5.36.0" }, "peerDependencies": { "react": ">=16.9.0", "react-dom": ">=16.9.0" } }, "sha512-Ys6NcjwGkuwkeaWBDqfI3xWuZ7rDiQXlH1o2zLfFzATfEgXcqpk8CkgMfbJD81McqjcJVez25a3kPxCR807evA=="],
|
||||
|
||||
"react": ["react@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ=="],
|
||||
|
||||
"react-dom": ["react-dom@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0", "scheduler": "^0.23.2" }, "peerDependencies": { "react": "^18.3.1" } }, "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw=="],
|
||||
|
||||
"react-draggable": ["react-draggable@4.5.0", "", { "dependencies": { "clsx": "^2.1.1", "prop-types": "^15.8.1" }, "peerDependencies": { "react": ">= 16.3.0", "react-dom": ">= 16.3.0" } }, "sha512-VC+HBLEZ0XJxnOxVAZsdRi8rD04Iz3SiiKOoYzamjylUcju/hP9np/aZdLHf/7WOD268WMoNJMvYfB5yAK45cw=="],
|
||||
|
||||
"react-is": ["react-is@18.3.1", "", {}, "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg=="],
|
||||
|
||||
"react-refresh": ["react-refresh@0.17.0", "", {}, "sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ=="],
|
||||
|
||||
"react-resizable": ["react-resizable@3.1.3", "", { "dependencies": { "prop-types": "15.x", "react-draggable": "^4.5.0" }, "peerDependencies": { "react": ">= 16.3", "react-dom": ">= 16.3" } }, "sha512-liJBNayhX7qA4tBJiBD321FDhJxgGTJ07uzH5zSORXoE8h7PyEZ8mLqmosST7ppf6C4zUsbd2gzDMmBCfFp9Lw=="],
|
||||
|
||||
"react-router": ["react-router@6.30.3", "", { "dependencies": { "@remix-run/router": "1.23.2" }, "peerDependencies": { "react": ">=16.8" } }, "sha512-XRnlbKMTmktBkjCLE8/XcZFlnHvr2Ltdr1eJX4idL55/9BbORzyZEaIkBFDhFGCEWBBItsVrDxwx3gnisMitdw=="],
|
||||
|
||||
"react-router-dom": ["react-router-dom@6.30.3", "", { "dependencies": { "@remix-run/router": "1.23.2", "react-router": "6.30.3" }, "peerDependencies": { "react": ">=16.8", "react-dom": ">=16.8" } }, "sha512-pxPcv1AczD4vso7G4Z3TKcvlxK7g7TNt3/FNGMhfqyntocvYKj+GCatfigGDjbLozC4baguJ0ReCigoDJXb0ag=="],
|
||||
|
||||
"resize-observer-polyfill": ["resize-observer-polyfill@1.5.1", "", {}, "sha512-LwZrotdHOo12nQuZlHEmtuXdqGoOD0OhaxopaNFxWzInpEgaLWoVuAMbTzixuosCx2nEG58ngzW3vxdWoxIgdg=="],
|
||||
|
||||
"rollup": ["rollup@4.57.1", "", { "dependencies": { "@types/estree": "1.0.8" }, "optionalDependencies": { "@rollup/rollup-android-arm-eabi": "4.57.1", "@rollup/rollup-android-arm64": "4.57.1", "@rollup/rollup-darwin-arm64": "4.57.1", "@rollup/rollup-darwin-x64": "4.57.1", "@rollup/rollup-freebsd-arm64": "4.57.1", "@rollup/rollup-freebsd-x64": "4.57.1", "@rollup/rollup-linux-arm-gnueabihf": "4.57.1", "@rollup/rollup-linux-arm-musleabihf": "4.57.1", "@rollup/rollup-linux-arm64-gnu": "4.57.1", "@rollup/rollup-linux-arm64-musl": "4.57.1", "@rollup/rollup-linux-loong64-gnu": "4.57.1", "@rollup/rollup-linux-loong64-musl": "4.57.1", "@rollup/rollup-linux-ppc64-gnu": "4.57.1", "@rollup/rollup-linux-ppc64-musl": "4.57.1", "@rollup/rollup-linux-riscv64-gnu": "4.57.1", "@rollup/rollup-linux-riscv64-musl": "4.57.1", "@rollup/rollup-linux-s390x-gnu": "4.57.1", "@rollup/rollup-linux-x64-gnu": "4.57.1", "@rollup/rollup-linux-x64-musl": "4.57.1", "@rollup/rollup-openbsd-x64": "4.57.1", "@rollup/rollup-openharmony-arm64": "4.57.1", "@rollup/rollup-win32-arm64-msvc": "4.57.1", "@rollup/rollup-win32-ia32-msvc": "4.57.1", "@rollup/rollup-win32-x64-gnu": "4.57.1", "@rollup/rollup-win32-x64-msvc": "4.57.1", "fsevents": "~2.3.2" }, "bin": "dist/bin/rollup" }, "sha512-oQL6lgK3e2QZeQ7gcgIkS2YZPg5slw37hYufJ3edKlfQSGGm8ICoxswK15ntSzF/a8+h7ekRy7k7oWc3BQ7y8A=="],
|
||||
|
||||
"scheduler": ["scheduler@0.23.2", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ=="],
|
||||
|
||||
"scroll-into-view-if-needed": ["scroll-into-view-if-needed@3.1.0", "", { "dependencies": { "compute-scroll-into-view": "^3.0.2" } }, "sha512-49oNpRjWRvnU8NyGVmUaYG4jtTkNonFZI86MmGRDqBphEK2EXT9gdEUoQPZhuBM8yWHxCWbobltqYO5M4XrUvQ=="],
|
||||
|
||||
"semver": ["semver@6.3.1", "", { "bin": "bin/semver.js" }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="],
|
||||
|
||||
"simplex-noise": ["simplex-noise@4.0.3", "", {}, "sha512-qSE2I4AngLQG7BXqoZj51jokT4WUXe8mOBrvfOXpci8+6Yu44+/dD5zqDpOx3Ux792eamTd2lLcI8jqFntk/lg=="],
|
||||
|
||||
"socket.io-client": ["socket.io-client@4.8.3", "", { "dependencies": { "@socket.io/component-emitter": "~3.1.0", "debug": "~4.4.1", "engine.io-client": "~6.6.1", "socket.io-parser": "~4.2.4" } }, "sha512-uP0bpjWrjQmUt5DTHq9RuoCBdFJF10cdX9X+a368j/Ft0wmaVgxlrjvK3kjvgCODOMMOz9lcaRzxmso0bTWZ/g=="],
|
||||
|
||||
"socket.io-parser": ["socket.io-parser@4.2.5", "", { "dependencies": { "@socket.io/component-emitter": "~3.1.0", "debug": "~4.4.1" } }, "sha512-bPMmpy/5WWKHea5Y/jYAP6k74A+hvmRCQaJuJB6I/ML5JZq/KfNieUVo/3Mh7SAqn7TyFdIo6wqYHInG1MU1bQ=="],
|
||||
|
||||
"source-map-js": ["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="],
|
||||
|
||||
"string-convert": ["string-convert@0.2.1", "", {}, "sha512-u/1tdPl4yQnPBjnVrmdLo9gtuLvELKsAoRapekWggdiQNvvvum+jYF329d84NAa660KQw7pB2n36KrIKVoXa3A=="],
|
||||
|
||||
"stylis": ["stylis@4.3.6", "", {}, "sha512-yQ3rwFWRfwNUY7H5vpU0wfdkNSnvnJinhF9830Swlaxl03zsOjCfmX0ugac+3LtK0lYSgwL/KXc8oYL3mG4YFQ=="],
|
||||
|
||||
"three": ["three@0.160.1", "", {}, "sha512-Bgl2wPJypDOZ1stAxwfWAcJ0WQf7QzlptsxkjYiURPz+n5k4RBDLsq+6f9Y75TYxn6aHLcWz+JNmwTOXWrQTBQ=="],
|
||||
|
||||
"throttle-debounce": ["throttle-debounce@5.0.2", "", {}, "sha512-B71/4oyj61iNH0KeCamLuE2rmKuTO5byTOSVwECM5FA7TiAiAW+UqTKZ9ERueC4qvgSttUhdmq1mXC3kJqGX7A=="],
|
||||
|
||||
"toggle-selection": ["toggle-selection@1.0.6", "", {}, "sha512-BiZS+C1OS8g/q2RRbJmy59xpyghNBqrr6k5L/uKBGRsTfxmu3ffiRnd8mlGPUVayg8pvfi5urfnu8TU7DVOkLQ=="],
|
||||
|
||||
"typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
|
||||
|
||||
"update-browserslist-db": ["update-browserslist-db@1.2.3", "", { "dependencies": { "escalade": "^3.2.0", "picocolors": "^1.1.1" }, "peerDependencies": { "browserslist": ">= 4.21.0" }, "bin": "cli.js" }, "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w=="],
|
||||
|
||||
"use-sync-external-store": ["use-sync-external-store@1.6.0", "", { "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, "sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w=="],
|
||||
|
||||
"vite": ["vite@5.4.21", "", { "dependencies": { "esbuild": "^0.21.3", "postcss": "^8.4.43", "rollup": "^4.20.0" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^18.0.0 || >=20.0.0", "less": "*", "lightningcss": "^1.21.0", "sass": "*", "sass-embedded": "*", "stylus": "*", "sugarss": "*", "terser": "^5.4.0" }, "optionalPeers": ["@types/node", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser"], "bin": "bin/vite.js" }, "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw=="],
|
||||
|
||||
"ws": ["ws@8.18.3", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg=="],
|
||||
|
||||
"xmlhttprequest-ssl": ["xmlhttprequest-ssl@2.1.2", "", {}, "sha512-TEU+nJVUUnA4CYJFLvK5X9AOeH4KvDvhIfm0vV1GaQRtchnG0hgK5p8hw/xjv8cunWYCsiPCSDzObPyhEwq3KQ=="],
|
||||
|
||||
"yallist": ["yallist@3.1.1", "", {}, "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="],
|
||||
|
||||
"zustand": ["zustand@4.5.7", "", { "dependencies": { "use-sync-external-store": "^1.2.2" }, "peerDependencies": { "@types/react": ">=16.8", "immer": ">=9.0.6", "react": ">=16.8" }, "optionalPeers": ["immer"] }, "sha512-CHOUy7mu3lbD6o6LJLfllpjkzhHXSBlX8B9+qPddUsIfeF5S/UZ5q0kmCsnRqT1UHFQZchNFDDzMbQsuesHWlw=="],
|
||||
|
||||
"prop-types/react-is": ["react-is@16.13.1", "", {}, "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ=="],
|
||||
}
|
||||
}
|
||||
|
Before Width: | Height: | Size: 4.4 MiB After Width: | Height: | Size: 4.4 MiB |
|
Before Width: | Height: | Size: 18 MiB After Width: | Height: | Size: 18 MiB |
1
frontend/legacy/3dearthmult/landing-point-geo.geojson
Normal file
1
frontend/legacy/3dearthmult/relation.json
Normal file
4
frontend/package-lock.json
generated
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "planet-frontend",
|
||||
"version": "1.0.0",
|
||||
"version": "0.21.7",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "planet-frontend",
|
||||
"version": "1.0.0",
|
||||
"version": "0.21.7",
|
||||
"dependencies": {
|
||||
"@ant-design/icons": "^5.2.6",
|
||||
"antd": "^5.12.5",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "planet-frontend",
|
||||
"version": "1.0.0",
|
||||
"version": "0.21.7",
|
||||
"private": true,
|
||||
"dependencies": {
|
||||
"@ant-design/icons": "^5.2.6",
|
||||
|
||||
435
frontend/public/earth/_backup/dock-centered-20260326/base.css
Normal file
@@ -0,0 +1,435 @@
|
||||
/* base.css - 公共基础样式 */
|
||||
|
||||
* {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
|
||||
background-color: #0a0a1a;
|
||||
color: #fff;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
#container {
|
||||
position: relative;
|
||||
width: 100vw;
|
||||
height: 100vh;
|
||||
}
|
||||
|
||||
#container.dragging {
|
||||
cursor: grabbing;
|
||||
}
|
||||
|
||||
/* Bottom Dock */
|
||||
#right-toolbar-group {
|
||||
position: absolute;
|
||||
bottom: 20px;
|
||||
left: 50%;
|
||||
transform: translateX(-50%);
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
z-index: 200;
|
||||
}
|
||||
|
||||
#right-toolbar-group,
|
||||
#info-panel,
|
||||
#coordinates-display,
|
||||
#legend,
|
||||
#earth-stats {
|
||||
transition:
|
||||
top 0.45s ease,
|
||||
right 0.45s ease,
|
||||
bottom 0.45s ease,
|
||||
left 0.45s ease,
|
||||
transform 0.45s ease,
|
||||
box-shadow 0.45s ease;
|
||||
}
|
||||
|
||||
/* Zoom Toolbar - Right side, vertical */
|
||||
#zoom-toolbar {
|
||||
position: relative;
|
||||
bottom: auto;
|
||||
right: auto;
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
align-items: center;
|
||||
gap: 6px;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
#zoom-toolbar .zoom-percent {
|
||||
font-size: 0.75rem;
|
||||
font-weight: 600;
|
||||
color: #4db8ff;
|
||||
min-width: 30px;
|
||||
display: inline-block;
|
||||
text-align: center;
|
||||
cursor: pointer;
|
||||
padding: 2px 4px;
|
||||
border-radius: 3px;
|
||||
transition: all 0.2s ease;
|
||||
}
|
||||
|
||||
#zoom-toolbar .zoom-percent:hover {
|
||||
background: rgba(77, 184, 255, 0.2);
|
||||
box-shadow: 0 0 10px rgba(77, 184, 255, 0.3);
|
||||
}
|
||||
|
||||
#zoom-toolbar .zoom-btn {
|
||||
width: 28px;
|
||||
height: 28px;
|
||||
min-width: 28px;
|
||||
border: none;
|
||||
border-radius: 50%;
|
||||
background: rgba(77, 184, 255, 0.2);
|
||||
color: #4db8ff;
|
||||
font-size: 14px;
|
||||
font-weight: bold;
|
||||
cursor: pointer;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
transition: all 0.2s ease;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
flex: 0 0 auto;
|
||||
box-sizing: border-box;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
#zoom-toolbar .zoom-btn:hover {
|
||||
background: rgba(77, 184, 255, 0.4);
|
||||
transform: scale(1.1);
|
||||
box-shadow: 0 0 10px rgba(77, 184, 255, 0.5);
|
||||
}
|
||||
|
||||
#zoom-toolbar #reset-view svg {
|
||||
width: 18px;
|
||||
height: 18px;
|
||||
stroke: currentColor;
|
||||
stroke-width: 1.8;
|
||||
fill: none;
|
||||
stroke-linecap: round;
|
||||
stroke-linejoin: round;
|
||||
}
|
||||
|
||||
#zoom-toolbar .zoom-percent {
|
||||
position: relative;
|
||||
}
|
||||
|
||||
#zoom-toolbar .tooltip {
|
||||
position: absolute;
|
||||
bottom: calc(100% + 12px);
|
||||
left: 50%;
|
||||
transform: translateX(-50%);
|
||||
background: rgba(10, 10, 30, 0.95);
|
||||
color: #fff;
|
||||
padding: 6px 12px;
|
||||
border-radius: 6px;
|
||||
font-size: 12px;
|
||||
white-space: nowrap;
|
||||
opacity: 0;
|
||||
visibility: hidden;
|
||||
transition: all 0.2s ease;
|
||||
border: 1px solid rgba(77, 184, 255, 0.4);
|
||||
pointer-events: none;
|
||||
z-index: 100;
|
||||
}
|
||||
|
||||
#zoom-toolbar .zoom-btn:hover .tooltip,
|
||||
#zoom-toolbar .zoom-percent:hover .tooltip {
|
||||
opacity: 1;
|
||||
visibility: visible;
|
||||
}
|
||||
|
||||
#zoom-toolbar .tooltip::after {
|
||||
content: '';
|
||||
position: absolute;
|
||||
top: 100%;
|
||||
left: 50%;
|
||||
transform: translateX(-50%);
|
||||
border: 6px solid transparent;
|
||||
border-top-color: rgba(77, 184, 255, 0.4);
|
||||
}
|
||||
|
||||
#loading {
|
||||
position: absolute;
|
||||
top: 50%;
|
||||
left: 50%;
|
||||
transform: translate(-50%, -50%);
|
||||
font-size: 1.2rem;
|
||||
color: #4db8ff;
|
||||
z-index: 100;
|
||||
text-align: center;
|
||||
background-color: rgba(10, 10, 30, 0.95);
|
||||
padding: 30px;
|
||||
border-radius: 10px;
|
||||
border: 1px solid #4db8ff;
|
||||
box-shadow: 0 0 30px rgba(77,184,255,0.3);
|
||||
}
|
||||
|
||||
#loading-spinner {
|
||||
border: 4px solid rgba(77, 184, 255, 0.3);
|
||||
border-top: 4px solid #4db8ff;
|
||||
border-radius: 50%;
|
||||
width: 40px;
|
||||
height: 40px;
|
||||
animation: spin 1s linear infinite;
|
||||
margin: 0 auto 15px;
|
||||
}
|
||||
|
||||
@keyframes spin {
|
||||
0% { transform: rotate(0deg); }
|
||||
100% { transform: rotate(360deg); }
|
||||
}
|
||||
|
||||
.error-message {
|
||||
color: #ff4444;
|
||||
margin-top: 10px;
|
||||
font-size: 0.9rem;
|
||||
display: none;
|
||||
padding: 10px;
|
||||
background-color: rgba(255, 68, 68, 0.1);
|
||||
border-radius: 5px;
|
||||
border-left: 3px solid #ff4444;
|
||||
}
|
||||
|
||||
.terrain-controls {
|
||||
margin-top: 15px;
|
||||
padding-top: 15px;
|
||||
border-top: 1px solid rgba(255, 255, 255, 0.1);
|
||||
}
|
||||
|
||||
.slider-container {
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
|
||||
.slider-label {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
margin-bottom: 5px;
|
||||
font-size: 0.9rem;
|
||||
}
|
||||
|
||||
input[type="range"] {
|
||||
width: 100%;
|
||||
height: 8px;
|
||||
-webkit-appearance: none;
|
||||
background: rgba(0, 102, 204, 0.3);
|
||||
border-radius: 4px;
|
||||
outline: none;
|
||||
}
|
||||
|
||||
input[type="range"]::-webkit-slider-thumb {
|
||||
-webkit-appearance: none;
|
||||
width: 16px;
|
||||
height: 16px;
|
||||
border-radius: 50%;
|
||||
background: #4db8ff;
|
||||
cursor: pointer;
|
||||
box-shadow: 0 0 10px #4db8ff;
|
||||
}
|
||||
|
||||
.status-message {
|
||||
position: absolute;
|
||||
top: 20px;
|
||||
right: 260px;
|
||||
background-color: rgba(10, 10, 30, 0.85);
|
||||
border-radius: 10px;
|
||||
padding: 10px 15px;
|
||||
z-index: 10;
|
||||
box-shadow: 0 0 20px rgba(0, 150, 255, 0.3);
|
||||
border: 1px solid rgba(0, 150, 255, 0.2);
|
||||
font-size: 0.9rem;
|
||||
display: none;
|
||||
backdrop-filter: blur(5px);
|
||||
}
|
||||
|
||||
.status-message.success {
|
||||
color: #44ff44;
|
||||
border-left: 3px solid #44ff44;
|
||||
}
|
||||
|
||||
.status-message.warning {
|
||||
color: #ffff44;
|
||||
border-left: 3px solid #ffff44;
|
||||
}
|
||||
|
||||
.status-message.error {
|
||||
color: #ff4444;
|
||||
border-left: 3px solid #ff4444;
|
||||
}
|
||||
|
||||
.tooltip {
|
||||
position: absolute;
|
||||
background-color: rgba(10, 10, 30, 0.95);
|
||||
border: 1px solid #4db8ff;
|
||||
border-radius: 5px;
|
||||
padding: 5px 10px;
|
||||
font-size: 0.8rem;
|
||||
color: #fff;
|
||||
pointer-events: none;
|
||||
z-index: 100;
|
||||
box-shadow: 0 0 10px rgba(77, 184, 255, 0.3);
|
||||
display: none;
|
||||
user-select: none;
|
||||
}
|
||||
|
||||
/* Control Toolbar - Stellarium/Star Walk style */
|
||||
#control-toolbar {
|
||||
position: relative;
|
||||
bottom: auto;
|
||||
right: auto;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: 12px;
|
||||
background: rgba(10, 10, 30, 0.9);
|
||||
border-radius: 999px;
|
||||
padding: 10px 14px;
|
||||
border: 1px solid rgba(77, 184, 255, 0.3);
|
||||
box-shadow: 0 0 20px rgba(77, 184, 255, 0.2);
|
||||
transition: all 0.3s ease;
|
||||
}
|
||||
|
||||
.toolbar-items {
|
||||
display: flex;
|
||||
gap: 6px;
|
||||
align-items: center;
|
||||
flex-wrap: nowrap;
|
||||
}
|
||||
|
||||
.toolbar-divider {
|
||||
width: 1px;
|
||||
height: 28px;
|
||||
background: rgba(77, 184, 255, 0.28);
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.toolbar-btn {
|
||||
position: relative;
|
||||
width: 28px;
|
||||
height: 28px;
|
||||
border: none;
|
||||
border-radius: 50%;
|
||||
background: rgba(77, 184, 255, 0.15);
|
||||
color: #4db8ff;
|
||||
font-size: 14px;
|
||||
cursor: pointer;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
transition: all 0.2s ease;
|
||||
box-sizing: border-box;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.toolbar-btn:hover {
|
||||
background: rgba(77, 184, 255, 0.35);
|
||||
transform: scale(1.1);
|
||||
box-shadow: 0 0 15px rgba(77, 184, 255, 0.5);
|
||||
}
|
||||
|
||||
.toolbar-btn:active {
|
||||
transform: scale(0.95);
|
||||
}
|
||||
|
||||
.toolbar-btn.active {
|
||||
background: rgba(77, 184, 255, 0.4);
|
||||
box-shadow: 0 0 10px rgba(77, 184, 255, 0.4) inset;
|
||||
}
|
||||
|
||||
.toolbar-btn .icon {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.toolbar-btn svg {
|
||||
width: 18px;
|
||||
height: 18px;
|
||||
stroke: currentColor;
|
||||
stroke-width: 2.1;
|
||||
fill: none;
|
||||
stroke-linecap: round;
|
||||
stroke-linejoin: round;
|
||||
}
|
||||
|
||||
#rotate-toggle .icon-play,
|
||||
#rotate-toggle.is-stopped .icon-pause {
|
||||
display: none;
|
||||
}
|
||||
|
||||
#rotate-toggle.is-stopped .icon-play {
|
||||
display: inline-flex;
|
||||
}
|
||||
|
||||
#container.layout-expanded #info-panel {
|
||||
top: 20px;
|
||||
left: 20px;
|
||||
transform: translate(calc(-100% + 20px), calc(-100% + 20px));
|
||||
}
|
||||
|
||||
#container.layout-expanded #coordinates-display {
|
||||
top: 20px;
|
||||
right: 20px;
|
||||
transform: translate(calc(100% - 20px), calc(-100% + 20px));
|
||||
}
|
||||
|
||||
#container.layout-expanded #legend {
|
||||
left: 20px;
|
||||
bottom: 20px;
|
||||
transform: translate(calc(-100% + 20px), calc(100% - 20px));
|
||||
}
|
||||
|
||||
#container.layout-expanded #earth-stats {
|
||||
right: 20px;
|
||||
bottom: 20px;
|
||||
transform: translate(calc(100% - 20px), calc(100% - 20px));
|
||||
}
|
||||
|
||||
#container.layout-expanded #right-toolbar-group {
|
||||
bottom: 20px;
|
||||
transform: translateX(-50%);
|
||||
}
|
||||
|
||||
.toolbar-btn .tooltip {
|
||||
position: absolute;
|
||||
bottom: 50px;
|
||||
left: 50%;
|
||||
transform: translateX(-50%);
|
||||
background: rgba(10, 10, 30, 0.95);
|
||||
color: #fff;
|
||||
padding: 6px 12px;
|
||||
border-radius: 6px;
|
||||
font-size: 12px;
|
||||
white-space: nowrap;
|
||||
opacity: 0;
|
||||
visibility: hidden;
|
||||
transition: all 0.2s ease;
|
||||
border: 1px solid rgba(77, 184, 255, 0.4);
|
||||
pointer-events: none;
|
||||
z-index: 100;
|
||||
}
|
||||
|
||||
.toolbar-btn:hover .tooltip {
|
||||
opacity: 1;
|
||||
visibility: visible;
|
||||
bottom: 52px;
|
||||
}
|
||||
|
||||
.toolbar-btn .tooltip::after {
|
||||
content: '';
|
||||
position: absolute;
|
||||
top: 100%;
|
||||
left: 50%;
|
||||
transform: translateX(-50%);
|
||||
border: 6px solid transparent;
|
||||
border-top-color: rgba(77, 184, 255, 0.4);
|
||||
}
|
||||
421
frontend/public/earth/_backup/dock-centered-20260326/controls.js
vendored
Normal file
@@ -0,0 +1,421 @@
|
||||
// controls.js - Zoom, rotate and toggle controls
|
||||
|
||||
import { CONFIG, EARTH_CONFIG } from "./constants.js";
|
||||
import { updateZoomDisplay, showStatusMessage } from "./ui.js";
|
||||
import { toggleTerrain } from "./earth.js";
|
||||
import { reloadData, clearLockedObject } from "./main.js";
|
||||
import {
|
||||
toggleSatellites,
|
||||
toggleTrails,
|
||||
getShowSatellites,
|
||||
getSatelliteCount,
|
||||
} from "./satellites.js";
|
||||
import { toggleCables, getShowCables } from "./cables.js";
|
||||
|
||||
// ---- Module state ----
// The exported `let` bindings are live: importers see updates made here.

// Whether the globe auto-rotates each frame (read via getAutoRotate()).
export let autoRotate = true;
|
||||
// Current zoom factor; camera distance = CONFIG.defaultCameraZ / zoomLevel.
export let zoomLevel = 1.0;
|
||||
// Whether the terrain overlay is shown (toggled in setupTerrainControls).
export let showTerrain = false;
|
||||
// NOTE(review): exported but never written or read in this module — presumably set by a pointer handler elsewhere; confirm before removing.
export let isDragging = false;
|
||||
// Whether the "maximize layout" mode is active (see toggleLayoutExpanded).
export let layoutExpanded = false;
|
||||
|
||||
// Earth Object3D captured by setupControls; used by resetView for rotation.
let earthObj = null;
|
||||
// Unbind callbacks for every listener registered through bindListener.
let listeners = [];
|
||||
// Extra teardown callbacks (e.g. stopping zoom timers), run by resetCleanup.
let cleanupFns = [];
|
||||
|
||||
/**
 * Attach `handler` to `element` and record an unbind callback so that
 * teardownControls()/resetCleanup() can remove every listener registered
 * through this module. Silently ignores a missing element.
 */
function bindListener(element, eventName, handler, options) {
  if (!element) return;
  element.addEventListener(eventName, handler, options);
  const unbind = () =>
    element.removeEventListener(eventName, handler, options);
  listeners.push(unbind);
}
|
||||
|
||||
/**
 * Run every pending teardown callback (timers first, then event-listener
 * unbinds) and clear both registries.
 */
function resetCleanup() {
  for (const teardown of cleanupFns) teardown();
  cleanupFns = [];
  for (const unbind of listeners) unbind();
  listeners = [];
}
|
||||
|
||||
/**
 * Entry point: wire up all interactive controls (zoom buttons, wheel zoom,
 * rotate/reset buttons, layer toggles). Safe to call repeatedly — any
 * listeners from a previous call are torn down first.
 *
 * @param {THREE.Camera} camera
 * @param {THREE.WebGLRenderer} renderer
 * @param {THREE.Scene} scene - accepted for interface parity; not used here
 * @param {THREE.Object3D} earth - globe object, kept for resetView()
 */
export function setupControls(camera, renderer, scene, earth) {
  resetCleanup();
  earthObj = earth;
  setupZoomControls(camera);
  setupWheelZoom(camera, renderer);
  // NOTE(review): setupRotateControls declares a single parameter, so the
  // second argument is ignored — kept for call-site compatibility.
  setupRotateControls(camera, earth);
  setupTerrainControls();
}
|
||||
|
||||
/**
 * Wire the +/− zoom buttons and the percentage label.
 *
 * Interaction model:
 *  - quick click (< 150ms): one 10% step;
 *  - press-and-hold (>= 150ms): continuous 1% ticks every 50ms until release;
 *  - clicking the percentage label animates zoom back to 100%.
 * Zoom is always clamped to [CONFIG.minZoom, CONFIG.maxZoom].
 */
function setupZoomControls(camera) {
  let repeatTimer = null;
  let holdTimer = null;
  let pressedAt = 0;
  const HOLD_THRESHOLD = 150;
  const LONG_PRESS_TICK = 50;
  const CLICK_STEP = 10;

  const MIN_PERCENT = CONFIG.minZoom * 100;
  const MAX_PERCENT = CONFIG.maxZoom * 100;

  // Shared step logic: move the current percentage by `direction * amount`
  // (direction is +1 or -1), clamp, and apply to the camera.
  function stepBy(direction, amount) {
    const current = Math.round(zoomLevel * 100);
    let next = direction > 0 ? current + amount : current - amount;
    if (next > MAX_PERCENT) next = MAX_PERCENT;
    if (next < MIN_PERCENT) next = MIN_PERCENT;
    zoomLevel = next / 100;
    applyZoom(camera);
  }

  const doZoomStep = (direction) => stepBy(direction, CLICK_STEP);
  const doContinuousZoom = (direction) => stepBy(direction, 1);

  function startContinuousZoom(direction) {
    doContinuousZoom(direction);
    repeatTimer = window.setInterval(
      () => doContinuousZoom(direction),
      LONG_PRESS_TICK,
    );
  }

  // Cancel both the pending hold timer and any running repeat interval.
  function stopZoom() {
    if (repeatTimer) {
      clearInterval(repeatTimer);
      repeatTimer = null;
    }
    if (holdTimer) {
      clearTimeout(holdTimer);
      holdTimer = null;
    }
  }

  function handlePress(direction) {
    pressedAt = Date.now();
    stopZoom();
    holdTimer = window.setTimeout(
      () => startContinuousZoom(direction),
      HOLD_THRESHOLD,
    );
  }

  function handleRelease(direction) {
    const heldFor = Date.now() - pressedAt;
    stopZoom();
    // A release before the hold threshold counts as a discrete click.
    if (heldFor < HOLD_THRESHOLD) {
      doZoomStep(direction);
    }
  }

  // Ensure timers are cleared when controls are torn down.
  cleanupFns.push(stopZoom);

  const zoomInBtn = document.getElementById("zoom-in");
  const zoomOutBtn = document.getElementById("zoom-out");
  const zoomValueEl = document.getElementById("zoom-value");

  // Identical press/release wiring for both buttons, differing only in sign.
  for (const [btn, dir] of [
    [zoomInBtn, 1],
    [zoomOutBtn, -1],
  ]) {
    bindListener(btn, "mousedown", () => handlePress(dir));
    bindListener(btn, "mouseup", () => handleRelease(dir));
    bindListener(btn, "mouseleave", stopZoom);
    bindListener(btn, "touchstart", (e) => {
      e.preventDefault();
      handlePress(dir);
    });
    bindListener(btn, "touchend", () => handleRelease(dir));
  }

  // Clicking the percentage label eases zoom back to 100% over 600ms.
  bindListener(zoomValueEl, "click", () => {
    const startZoomVal = zoomLevel;
    const targetZoom = 1.0;
    const startDistance = CONFIG.defaultCameraZ / startZoomVal;
    const targetDistance = CONFIG.defaultCameraZ / targetZoom;

    animateValue(
      0,
      1,
      600,
      (progress) => {
        // NOTE: progress from animateValue is already eased; applying the
        // cubic ease again matches the original animation curve.
        const ease = 1 - Math.pow(1 - progress, 3);
        zoomLevel = startZoomVal + (targetZoom - startZoomVal) * ease;
        camera.position.z = CONFIG.defaultCameraZ / zoomLevel;
        const distance =
          startDistance + (targetDistance - startDistance) * ease;
        updateZoomDisplay(zoomLevel, distance.toFixed(0));
      },
      () => {
        zoomLevel = 1.0;
        showStatusMessage("缩放已重置到100%", "info");
      },
    );
  });
}
|
||||
|
||||
/**
 * Mouse-wheel zoom on the renderer canvas: wheel-up zooms in, wheel-down
 * zooms out, in 10% increments clamped to the configured range.
 * Registered with passive:false so preventDefault() can stop page scroll.
 */
function setupWheelZoom(camera, renderer) {
  const onWheel = (event) => {
    event.preventDefault();
    zoomLevel =
      event.deltaY < 0
        ? Math.min(zoomLevel + 0.1, CONFIG.maxZoom)
        : Math.max(zoomLevel - 0.1, CONFIG.minZoom);
    applyZoom(camera);
  };
  bindListener(renderer?.domElement, "wheel", onWheel, { passive: false });
}
|
||||
|
||||
/**
 * Apply the current zoomLevel to the camera (distance is inversely
 * proportional to zoom) and refresh the zoom readout in the UI.
 */
function applyZoom(camera) {
  camera.position.z = CONFIG.defaultCameraZ / zoomLevel;
  updateZoomDisplay(zoomLevel, camera.position.z.toFixed(0));
}
|
||||
|
||||
/**
 * requestAnimationFrame tween from `start` to `end` over `duration` ms
 * with a cubic ease-out. Invokes onUpdate(value) every frame and
 * onComplete() (if provided) exactly once when finished.
 */
function animateValue(start, end, duration, onUpdate, onComplete) {
  const beganAt = performance.now();

  const tick = (now) => {
    const progress = Math.min((now - beganAt) / duration, 1);
    const eased = 1 - Math.pow(1 - progress, 3);
    onUpdate(start + (end - start) * eased);

    if (progress < 1) {
      requestAnimationFrame(tick);
    } else if (onComplete) {
      onComplete();
    }
  };

  requestAnimationFrame(tick);
}
|
||||
|
||||
/**
 * Reset the view: rotate the globe toward the viewer's geolocation (or
 * China as a fallback) and ease zoom back to 100% over 800ms. Also
 * releases any locked object. No-op if the earth object is not set yet.
 */
export function resetView(camera) {
  if (!earthObj) return;

  // Animate earth rotation and camera zoom toward the given view target.
  const animateToView = (targetLat, targetLon, targetRotLon) => {
    const latRot = (targetLat * Math.PI) / 180;
    const targetRotX =
      EARTH_CONFIG.tiltRad + latRot * EARTH_CONFIG.latCoefficient;
    const targetRotY = -((targetRotLon * Math.PI) / 180);

    const startRotX = earthObj.rotation.x;
    const startRotY = earthObj.rotation.y;
    const startZoom = zoomLevel;
    const targetZoom = 1.0;

    animateValue(
      0,
      1,
      800,
      (progress) => {
        // Progress is re-eased here to match the original animation curve.
        const ease = 1 - Math.pow(1 - progress, 3);
        earthObj.rotation.x = startRotX + (targetRotX - startRotX) * ease;
        earthObj.rotation.y = startRotY + (targetRotY - startRotY) * ease;

        zoomLevel = startZoom + (targetZoom - startZoom) * ease;
        camera.position.z = CONFIG.defaultCameraZ / zoomLevel;
        updateZoomDisplay(zoomLevel, camera.position.z.toFixed(0));
      },
      () => {
        zoomLevel = 1.0;
        showStatusMessage("视角已重置", "info");
      },
    );
  };

  const goToDefault = () =>
    animateToView(
      EARTH_CONFIG.chinaLat,
      EARTH_CONFIG.chinaLon,
      EARTH_CONFIG.chinaRotLon,
    );

  if (navigator.geolocation) {
    // Prefer the viewer's own position; fall back on error or 5s timeout.
    navigator.geolocation.getCurrentPosition(
      (pos) =>
        animateToView(
          pos.coords.latitude,
          pos.coords.longitude,
          -pos.coords.longitude,
        ),
      goToDefault,
      { timeout: 5000, enableHighAccuracy: false },
    );
  } else {
    goToDefault();
  }

  clearLockedObject();
}
|
||||
|
||||
/**
 * Wire the auto-rotate toggle button and the reset-view button, and sync
 * the rotate button's initial visual state with the autoRotate flag.
 */
function setupRotateControls(camera) {
  const rotateBtn = document.getElementById("rotate-toggle");
  const resetViewBtn = document.getElementById("reset-view");

  bindListener(rotateBtn, "click", () => {
    const rotating = toggleAutoRotate();
    showStatusMessage(rotating ? "自动旋转已开启" : "自动旋转已暂停", "info");
  });

  updateRotateUI();

  bindListener(resetViewBtn, "click", () => resetView(camera));
}
|
||||
|
||||
/**
 * Wire the layer-toggle toolbar: terrain, satellites, trails, cables,
 * data reload and layout-expand buttons. Each toggle flips its feature,
 * updates the button's active state/tooltip, and shows a status message.
 */
function setupTerrainControls() {
  const container = document.getElementById("container");
  const terrainBtn = document.getElementById("toggle-terrain");
  const satellitesBtn = document.getElementById("toggle-satellites");
  const trailsBtn = document.getElementById("toggle-trails");
  const cablesBtn = document.getElementById("toggle-cables");
  const layoutBtn = document.getElementById("layout-toggle");
  const reloadBtn = document.getElementById("reload-data");

  // Update a button's tooltip text, if it has one.
  const setTooltip = (btn, text) => {
    const tooltip = btn.querySelector(".tooltip");
    if (tooltip) tooltip.textContent = text;
  };

  // Trails start visible, so mark the button active up front.
  if (trailsBtn) {
    trailsBtn.classList.add("active");
    setTooltip(trailsBtn, "隐藏轨迹");
  }

  bindListener(terrainBtn, "click", () => {
    showTerrain = !showTerrain;
    toggleTerrain(showTerrain);
    terrainBtn.classList.toggle("active", showTerrain);
    setTooltip(terrainBtn, showTerrain ? "隐藏地形" : "显示地形");
    const terrainStatus = document.getElementById("terrain-status");
    if (terrainStatus)
      terrainStatus.textContent = showTerrain ? "开启" : "关闭";
    showStatusMessage(showTerrain ? "地形已显示" : "地形已隐藏", "info");
  });

  bindListener(satellitesBtn, "click", () => {
    const showSats = !getShowSatellites();
    // Hiding satellites also releases any locked selection.
    if (!showSats) {
      clearLockedObject();
    }
    toggleSatellites(showSats);
    satellitesBtn.classList.toggle("active", showSats);
    setTooltip(satellitesBtn, showSats ? "隐藏卫星" : "显示卫星");
    const satelliteCountEl = document.getElementById("satellite-count");
    if (satelliteCountEl)
      satelliteCountEl.textContent = getSatelliteCount() + " 颗";
    showStatusMessage(showSats ? "卫星已显示" : "卫星已隐藏", "info");
  });

  bindListener(trailsBtn, "click", () => {
    // Trail visibility is tracked via the button's own "active" class.
    const nextShowTrails = !trailsBtn.classList.contains("active");
    toggleTrails(nextShowTrails);
    trailsBtn.classList.toggle("active", nextShowTrails);
    setTooltip(trailsBtn, nextShowTrails ? "隐藏轨迹" : "显示轨迹");
    showStatusMessage(nextShowTrails ? "轨迹已显示" : "轨迹已隐藏", "info");
  });

  bindListener(cablesBtn, "click", () => {
    const showNextCables = !getShowCables();
    // Hiding cables also releases any locked selection.
    if (!showNextCables) {
      clearLockedObject();
    }
    toggleCables(showNextCables);
    cablesBtn.classList.toggle("active", showNextCables);
    setTooltip(cablesBtn, showNextCables ? "隐藏线缆" : "显示线缆");
    showStatusMessage(showNextCables ? "线缆已显示" : "线缆已隐藏", "info");
  });

  bindListener(reloadBtn, "click", async () => {
    await reloadData();
  });

  bindListener(layoutBtn, "click", () => {
    const expanded = toggleLayoutExpanded(container);
    showStatusMessage(expanded ? "布局已最大化" : "布局已恢复", "info");
  });

  updateLayoutUI(container);
}
|
||||
|
||||
/**
 * Public teardown hook: remove every listener and timer registered by
 * setupControls().
 */
export function teardownControls() {
  resetCleanup();
}
|
||||
|
||||
/** @returns {boolean} whether the globe is currently auto-rotating. */
export function getAutoRotate() {
  return autoRotate;
}
|
||||
|
||||
/**
 * Reflect the autoRotate flag on the rotate toolbar button: toggle the
 * active/is-stopped classes and swap the tooltip label.
 */
function updateRotateUI() {
  const btn = document.getElementById("rotate-toggle");
  if (!btn) return;
  btn.classList.toggle("active", autoRotate);
  btn.classList.toggle("is-stopped", !autoRotate);
  const tooltip = btn.querySelector(".tooltip");
  if (tooltip) tooltip.textContent = autoRotate ? "暂停旋转" : "开始旋转";
}
|
||||
|
||||
/**
 * Set auto-rotation explicitly and refresh the rotate button's UI state.
 * @param {boolean} value
 */
export function setAutoRotate(value) {
  autoRotate = value;
  updateRotateUI();
}
|
||||
|
||||
/**
 * Flip auto-rotation, refresh the button UI, release any locked object,
 * and report the new state.
 * @returns {boolean} the new autoRotate value
 */
export function toggleAutoRotate() {
  autoRotate = !autoRotate;
  updateRotateUI();
  clearLockedObject();
  return autoRotate;
}
|
||||
|
||||
/** @returns {number} the current zoom factor (1.0 = 100%). */
export function getZoomLevel() {
  return zoomLevel;
}
|
||||
|
||||
/** @returns {boolean} whether the terrain overlay is currently shown. */
export function getShowTerrain() {
  return showTerrain;
}
|
||||
|
||||
/**
 * Sync the layout-expanded state onto the container (CSS class) and the
 * layout-toggle button (active class, title, tooltip label).
 */
function updateLayoutUI(container) {
  if (container) {
    container.classList.toggle("layout-expanded", layoutExpanded);
  }

  const btn = document.getElementById("layout-toggle");
  if (!btn) return;
  btn.classList.toggle("active", layoutExpanded);
  const nextLabel = layoutExpanded ? "恢复布局" : "最大化布局";
  btn.title = nextLabel;
  const tooltip = btn.querySelector(".tooltip");
  if (tooltip) tooltip.textContent = nextLabel;
}
|
||||
|
||||
/**
 * Flip the expanded-layout flag, refresh the dependent UI, and report
 * the new state.
 * @returns {boolean} the new layoutExpanded value
 */
function toggleLayoutExpanded(container) {
  layoutExpanded = !layoutExpanded;
  updateLayoutUI(container);
  return layoutExpanded;
}
|
||||
227
frontend/public/earth/_backup/dock-centered-20260326/index.html
Normal file
@@ -0,0 +1,227 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="zh-CN">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>智能星球计划 - 现实层宇宙全息感知</title>
|
||||
<script type="importmap">
|
||||
{
|
||||
"imports": {
|
||||
"three": "https://esm.sh/three@0.128.0",
|
||||
"simplex-noise": "https://esm.sh/simplex-noise@4.0.1",
|
||||
"satellite.js": "https://esm.sh/satellite.js@5.0.0"
|
||||
}
|
||||
}
|
||||
</script>
|
||||
<link rel="stylesheet" href="css/base.css">
|
||||
<link rel="stylesheet" href="css/info-panel.css">
|
||||
<link rel="stylesheet" href="css/coordinates-display.css">
|
||||
<link rel="stylesheet" href="css/legend.css">
|
||||
<link rel="stylesheet" href="css/earth-stats.css">
|
||||
</head>
|
||||
<body>
|
||||
<div id="container">
|
||||
<div id="info-panel">
|
||||
<h1>智能星球计划</h1>
|
||||
<div class="subtitle">现实层宇宙全息感知系统 | 卫星 · 海底光缆 · 算力基础设施</div>
|
||||
|
||||
<div id="info-card" class="info-card" style="display: none;">
|
||||
<div class="info-card-header">
|
||||
<span class="info-card-icon" id="info-card-icon">🛰️</span>
|
||||
<h3 id="info-card-title">详情</h3>
|
||||
</div>
|
||||
<div id="info-card-content"></div>
|
||||
</div>
|
||||
|
||||
<div id="error-message" class="error-message"></div>
|
||||
</div>
|
||||
|
||||
<div id="right-toolbar-group">
|
||||
<div id="control-toolbar">
|
||||
<div class="toolbar-items">
|
||||
<button id="layout-toggle" class="toolbar-btn" title="最大化布局">
|
||||
<span class="icon" aria-hidden="true">
|
||||
<svg viewBox="0 0 24 24">
|
||||
<path d="M9 9H5V5"></path>
|
||||
<path d="M15 9h4V5"></path>
|
||||
<path d="M9 15H5v4"></path>
|
||||
<path d="M15 15h4v4"></path>
|
||||
<path d="M5 5l5 5"></path>
|
||||
<path d="M19 5l-5 5"></path>
|
||||
<path d="M5 19l5-5"></path>
|
||||
<path d="M19 19l-5-5"></path>
|
||||
</svg>
|
||||
</span>
|
||||
<span class="tooltip">最大化布局</span>
|
||||
</button>
|
||||
<button id="rotate-toggle" class="toolbar-btn" title="自动旋转">
|
||||
<span class="icon rotate-icon icon-pause" aria-hidden="true">
|
||||
<svg viewBox="0 0 24 24">
|
||||
<path d="M9 6v12"></path>
|
||||
<path d="M15 6v12"></path>
|
||||
</svg>
|
||||
</span>
|
||||
<span class="icon rotate-icon icon-play" aria-hidden="true">
|
||||
<svg viewBox="0 0 24 24">
|
||||
<path d="M8 6.5v11l9-5.5z" fill="currentColor" stroke="none"></path>
|
||||
</svg>
|
||||
</span>
|
||||
<span class="tooltip">自动旋转</span>
|
||||
</button>
|
||||
<button id="toggle-cables" class="toolbar-btn active" title="显示/隐藏线缆">
|
||||
<span class="icon" aria-hidden="true">
|
||||
<svg viewBox="0 0 24 24">
|
||||
<circle cx="12" cy="12" r="6.5"></circle>
|
||||
<path d="M5.8 12h12.4"></path>
|
||||
<path d="M12 5.8a8.5 8.5 0 0 1 0 12.4"></path>
|
||||
<path d="M8 16c2-1.8 6-1.8 8 0"></path>
|
||||
</svg>
|
||||
</span>
|
||||
<span class="tooltip">隐藏线缆</span>
|
||||
</button>
|
||||
<button id="toggle-terrain" class="toolbar-btn" title="显示/隐藏地形">
|
||||
<span class="icon" aria-hidden="true">
|
||||
<svg viewBox="0 0 24 24">
|
||||
<path d="M3 18h18"></path>
|
||||
<path d="M4.5 18l5-7 3 4 3.5-6 3.5 9"></path>
|
||||
<path d="M11 18l2-3 1.5 2"></path>
|
||||
</svg>
|
||||
</span>
|
||||
<span class="tooltip">显示/隐藏地形</span>
|
||||
</button>
|
||||
<button id="toggle-satellites" class="toolbar-btn" title="显示/隐藏卫星">
|
||||
<span class="icon" aria-hidden="true">
|
||||
<svg viewBox="0 0 24 24">
|
||||
<rect x="10" y="10" width="4" height="4" rx="0.8"></rect>
|
||||
<rect x="4" y="9" width="4" height="6" rx="0.8"></rect>
|
||||
<rect x="16" y="9" width="4" height="6" rx="0.8"></rect>
|
||||
<path d="M8 12h2"></path>
|
||||
<path d="M14 12h2"></path>
|
||||
<path d="M12 8V6"></path>
|
||||
<path d="M11 6h2"></path>
|
||||
<path d="M12 14v4"></path>
|
||||
<path d="M10 18h4"></path>
|
||||
</svg>
|
||||
</span>
|
||||
<span class="tooltip">显示卫星</span>
|
||||
</button>
|
||||
<button id="toggle-trails" class="toolbar-btn active" title="显示/隐藏轨迹">
|
||||
<span class="icon" aria-hidden="true">
|
||||
<svg viewBox="0 0 24 24">
|
||||
<path d="M5 17h7"></path>
|
||||
<path d="M7 13.5h8"></path>
|
||||
<path d="M10 10h6"></path>
|
||||
<circle cx="17.5" cy="8.5" r="2.2" fill="currentColor" stroke="none"></circle>
|
||||
<path d="M15.8 10.2l2.8-2.8"></path>
|
||||
</svg>
|
||||
</span>
|
||||
<span class="tooltip">隐藏轨迹</span>
|
||||
</button>
|
||||
<button id="reload-data" class="toolbar-btn" title="重新加载数据">
|
||||
<span class="icon" aria-hidden="true">
|
||||
<svg viewBox="0 0 24 24">
|
||||
<path d="M20 5v5h-5"></path>
|
||||
<path d="M20 10a8 8 0 1 0 2 5"></path>
|
||||
</svg>
|
||||
</span>
|
||||
<span class="tooltip">重新加载数据</span>
|
||||
</button>
|
||||
</div>
|
||||
<div class="toolbar-divider" aria-hidden="true"></div>
|
||||
<div id="zoom-toolbar">
|
||||
<button id="zoom-out" class="zoom-btn" title="缩小">−<span class="tooltip">缩小</span></button>
|
||||
<span id="zoom-value" class="zoom-percent" title="重置缩放到100%">100%<span class="tooltip">重置缩放到100%</span></span>
|
||||
<button id="zoom-in" class="zoom-btn" title="放大">+<span class="tooltip">放大</span></button>
|
||||
<button id="reset-view" class="zoom-btn" title="重置视角">
|
||||
<svg viewBox="0 0 24 24" aria-hidden="true">
|
||||
<circle cx="12" cy="12" r="5"></circle>
|
||||
<path d="M12 3v4"></path>
|
||||
<path d="M12 17v4"></path>
|
||||
<path d="M3 12h4"></path>
|
||||
<path d="M17 12h4"></path>
|
||||
<circle cx="12" cy="12" r="1.5" fill="currentColor" stroke="none"></circle>
|
||||
</svg>
|
||||
<span class="tooltip">重置视角</span>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="coordinates-display">
|
||||
<h3 style="color:#4db8ff; margin-bottom:8px; font-size:1.1rem;">坐标信息</h3>
|
||||
<div class="coord-item">
|
||||
<span class="coord-label">经度:</span>
|
||||
<span id="longitude-value" class="coord-value">0.00°</span>
|
||||
</div>
|
||||
<div class="coord-item">
|
||||
<span class="coord-label">纬度:</span>
|
||||
<span id="latitude-value" class="coord-value">0.00°</span>
|
||||
</div>
|
||||
<div id="zoom-level">缩放: 1.0x</div>
|
||||
<div class="mouse-coords" id="mouse-coords">鼠标位置: 无</div>
|
||||
</div>
|
||||
|
||||
<div id="legend">
|
||||
<h3 style="color:#4db8ff; margin-bottom:10px; font-size:1.1rem;">图例</h3>
|
||||
<div class="legend-item">
|
||||
<div class="legend-color" style="background-color: #ff4444;"></div>
|
||||
<span>Americas II</span>
|
||||
</div>
|
||||
<div class="legend-item">
|
||||
<div class="legend-color" style="background-color: #44ff44;"></div>
|
||||
<span>AU Aleutian A</span>
|
||||
</div>
|
||||
<div class="legend-item">
|
||||
<div class="legend-color" style="background-color: #4444ff;"></div>
|
||||
<span>AU Aleutian B</span>
|
||||
</div>
|
||||
<div class="legend-item">
|
||||
<div class="legend-color" style="background-color: #ffff44;"></div>
|
||||
<span>其他电缆</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="earth-stats">
|
||||
<h3 style="color:#4db8ff; margin-bottom:10px; font-size:1.1rem;">地球信息</h3>
|
||||
<div class="stats-item">
|
||||
<span class="stats-label">电缆系统:</span>
|
||||
<span class="stats-value" id="cable-count">0个</span>
|
||||
</div>
|
||||
<div class="stats-item">
|
||||
<span class="stats-label">状态:</span>
|
||||
<span class="stats-value" id="cable-status-summary">-</span>
|
||||
</div>
|
||||
<div class="stats-item">
|
||||
<span class="stats-label">登陆点:</span>
|
||||
<span class="stats-value" id="landing-point-count">0个</span>
|
||||
</div>
|
||||
<div class="stats-item">
|
||||
<span class="stats-label">地形:</span>
|
||||
<span class="stats-value" id="terrain-status">开启</span>
|
||||
</div>
|
||||
<div class="stats-item">
|
||||
<span class="stats-label">卫星:</span>
|
||||
<span class="stats-value" id="satellite-count">0 颗</span>
|
||||
</div>
|
||||
<div class="stats-item">
|
||||
<span class="stats-label">视角距离:</span>
|
||||
<span class="stats-value" id="camera-distance">300 km</span>
|
||||
</div>
|
||||
<div class="stats-item">
|
||||
<span class="stats-label">纹理质量:</span>
|
||||
<span class="stats-value" id="texture-quality">8K 卫星图</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="loading">
|
||||
<div id="loading-spinner"></div>
|
||||
<div id="loading-title">正在初始化全球态势数据...</div>
|
||||
<div id="loading-subtitle" style="font-size:0.9rem; margin-top:10px; color:#aaa;">同步卫星、海底光缆与登陆点数据</div>
|
||||
</div>
|
||||
<div id="status-message" class="status-message" style="display: none;"></div>
|
||||
<div id="tooltip" class="tooltip"></div>
|
||||
</div>
|
||||
|
||||
<script type="module" src="js/main.js"></script>
|
||||
</body>
|
||||
</html>
|
||||
|
Before Width: | Height: | Size: 4.4 MiB After Width: | Height: | Size: 4.4 MiB |
|
Before Width: | Height: | Size: 18 MiB After Width: | Height: | Size: 18 MiB |
6
frontend/public/earth/assets/icons/cables.svg
Normal file
@@ -0,0 +1,6 @@
|
||||
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<circle cx="12" cy="12" r="6.75" stroke="#4DB8FF" stroke-width="2.1"/>
|
||||
<path d="M5.75 12H18.25" stroke="#4DB8FF" stroke-width="2.1" stroke-linecap="round"/>
|
||||
<path d="M12 5.8C14.7 7.75 14.7 16.25 12 18.2" stroke="#4DB8FF" stroke-width="2.1" stroke-linecap="round"/>
|
||||
<path d="M8 16C9.95 14.2 14.05 14.2 16 16" stroke="#4DB8FF" stroke-width="2.1" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 480 B |
5
frontend/public/earth/assets/icons/info.svg
Normal file
@@ -0,0 +1,5 @@
|
||||
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<circle cx="12" cy="12" r="8" stroke="#4DB8FF" stroke-width="2.2"/>
|
||||
<path d="M12 10V16" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round"/>
|
||||
<circle cx="12" cy="7.25" r="1.25" fill="#4DB8FF"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 310 B |
10
frontend/public/earth/assets/icons/layout-collapse.svg
Normal file
@@ -0,0 +1,10 @@
|
||||
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M9 9L5 5" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round"/>
|
||||
<path d="M5 8V5H8" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M15 9L19 5" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round"/>
|
||||
<path d="M16 5H19V8" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M9 15L5 19" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round"/>
|
||||
<path d="M5 16V19H8" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M15 15L19 19" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round"/>
|
||||
<path d="M16 19H19V16" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 871 B |
10
frontend/public/earth/assets/icons/layout.svg
Normal file
@@ -0,0 +1,10 @@
|
||||
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M6 9.2L9.2 9.2L9.2 6" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M18 9.2L14.8 9.2L14.8 6" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M6 14.8L9.2 14.8L9.2 18" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M18 14.8L14.8 14.8L14.8 18" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M6 6L10 10" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round"/>
|
||||
<path d="M18 6L14 10" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round"/>
|
||||
<path d="M6 18L10 14" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round"/>
|
||||
<path d="M18 18L14 14" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 927 B |
4
frontend/public/earth/assets/icons/pause.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M9 6.25V17.75" stroke="#4DB8FF" stroke-width="2.4" stroke-linecap="round"/>
|
||||
<path d="M15 6.25V17.75" stroke="#4DB8FF" stroke-width="2.4" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 278 B |
3
frontend/public/earth/assets/icons/play.svg
Normal file
@@ -0,0 +1,3 @@
|
||||
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M8.5 6.5L17.5 12L8.5 17.5V6.5Z" fill="#4DB8FF"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 163 B |
6
frontend/public/earth/assets/icons/reload.svg
Normal file
@@ -0,0 +1,6 @@
|
||||
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M6.8 12.6C6.8 8.95 9.75 6 13.4 6C14.9 6 16.24 6.46 17.3 7.28" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round"/>
|
||||
<path d="M18.85 10.45C19.2 11.15 19.4 11.95 19.4 12.8C19.4 16.45 16.45 19.4 12.8 19.4C10.05 19.4 7.69 17.72 6.7 15.33" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round"/>
|
||||
<path d="M15.9 5.95H19.2V9.25" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M19.2 5.95L16.7 8.45" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 631 B |
8
frontend/public/earth/assets/icons/reset-view.svg
Normal file
@@ -0,0 +1,8 @@
|
||||
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<circle cx="12" cy="12" r="5" stroke="#4DB8FF" stroke-width="2.2"/>
|
||||
<path d="M12 3V6.5" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round"/>
|
||||
<path d="M12 17.5V21" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round"/>
|
||||
<path d="M3 12H6.5" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round"/>
|
||||
<path d="M17.5 12H21" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round"/>
|
||||
<circle cx="12" cy="12" r="1.45" fill="#4DB8FF"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 561 B |
11
frontend/public/earth/assets/icons/satellite.svg
Normal file
@@ -0,0 +1,11 @@
|
||||
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<rect x="10" y="10" width="4" height="4" rx="0.8" stroke="#4DB8FF" stroke-width="2"/>
|
||||
<rect x="4" y="9" width="4" height="6" rx="0.8" stroke="#4DB8FF" stroke-width="2"/>
|
||||
<rect x="16" y="9" width="4" height="6" rx="0.8" stroke="#4DB8FF" stroke-width="2"/>
|
||||
<path d="M8 12H10" stroke="#4DB8FF" stroke-width="2" stroke-linecap="round"/>
|
||||
<path d="M14 12H16" stroke="#4DB8FF" stroke-width="2" stroke-linecap="round"/>
|
||||
<path d="M12 8V6" stroke="#4DB8FF" stroke-width="2" stroke-linecap="round"/>
|
||||
<path d="M10.75 6H13.25" stroke="#4DB8FF" stroke-width="2" stroke-linecap="round"/>
|
||||
<path d="M12 14V18" stroke="#4DB8FF" stroke-width="2" stroke-linecap="round"/>
|
||||
<path d="M10.25 18H13.75" stroke="#4DB8FF" stroke-width="2" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 858 B |
4
frontend/public/earth/assets/icons/search.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<circle cx="10.5" cy="10.5" r="5.75" stroke="#4DB8FF" stroke-width="2.2"/>
|
||||
<path d="M15.2 15.2L19.25 19.25" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 276 B |
5
frontend/public/earth/assets/icons/terrain.svg
Normal file
@@ -0,0 +1,5 @@
|
||||
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M3 18H21" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round"/>
|
||||
<path d="M4.5 18L9.5 11L12.5 15L16 9L19.5 18" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M11.25 18L13.05 15.35L14.55 17.25" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 449 B |
7
frontend/public/earth/assets/icons/trails.svg
Normal file
@@ -0,0 +1,7 @@
|
||||
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M5 17H12" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round"/>
|
||||
<path d="M7 13.5H15" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round"/>
|
||||
<path d="M10 10H16" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round"/>
|
||||
<circle cx="17.5" cy="8.5" r="2.2" fill="#4DB8FF"/>
|
||||
<path d="M15.8 10.2L18.55 7.45" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 501 B |
6
frontend/public/earth/assets/icons/zoom.svg
Normal file
@@ -0,0 +1,6 @@
|
||||
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<circle cx="10.5" cy="10.5" r="5.75" stroke="#4DB8FF" stroke-width="2.2"/>
|
||||
<path d="M15.25 15.25L19.25 19.25" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round"/>
|
||||
<path d="M10.5 8V13" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round"/>
|
||||
<path d="M8 10.5H13" stroke="#4DB8FF" stroke-width="2.2" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 446 B |
@@ -13,18 +13,132 @@ body {
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
:root {
|
||||
--hud-border: rgba(210, 237, 255, 0.32);
|
||||
--hud-border-hover: rgba(232, 246, 255, 0.48);
|
||||
--hud-border-active: rgba(245, 251, 255, 0.62);
|
||||
--glass-fill-top: rgba(255, 255, 255, 0.18);
|
||||
--glass-fill-bottom: rgba(115, 180, 255, 0.08);
|
||||
--glass-sheen: rgba(255, 255, 255, 0.34);
|
||||
--glass-shadow: 0 14px 30px rgba(0, 0, 0, 0.22);
|
||||
--glass-glow: 0 0 26px rgba(120, 200, 255, 0.16);
|
||||
}
|
||||
|
||||
@property --float-offset {
|
||||
syntax: '<length>';
|
||||
inherits: false;
|
||||
initial-value: 0px;
|
||||
}
|
||||
|
||||
#container {
|
||||
position: relative;
|
||||
width: 100vw;
|
||||
height: 100vh;
|
||||
/* user-select: none;
|
||||
-webkit-user-select: none; */
|
||||
}
|
||||
|
||||
#container.dragging {
|
||||
cursor: grabbing;
|
||||
}
|
||||
|
||||
/* Bottom Dock */
|
||||
#right-toolbar-group {
|
||||
position: absolute;
|
||||
bottom: 18px;
|
||||
left: 50%;
|
||||
transform: translateX(-50%);
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
z-index: 200;
|
||||
}
|
||||
|
||||
#right-toolbar-group,
|
||||
#info-panel,
|
||||
#coordinates-display,
|
||||
#legend,
|
||||
#earth-stats {
|
||||
transition:
|
||||
top 0.45s ease,
|
||||
right 0.45s ease,
|
||||
bottom 0.45s ease,
|
||||
left 0.45s ease,
|
||||
transform 0.45s ease,
|
||||
box-shadow 0.45s ease;
|
||||
}
|
||||
|
||||
#info-panel,
|
||||
#coordinates-display,
|
||||
#legend,
|
||||
#earth-stats,
|
||||
#satellite-info {
|
||||
position: absolute;
|
||||
overflow: hidden;
|
||||
isolation: isolate;
|
||||
background:
|
||||
radial-gradient(circle at 24% 12%, rgba(255, 255, 255, 0.12), transparent 28%),
|
||||
radial-gradient(circle at 78% 115%, rgba(255, 255, 255, 0.06), transparent 32%),
|
||||
linear-gradient(180deg, rgba(255, 255, 255, 0.14), rgba(110, 176, 255, 0.06)),
|
||||
rgba(7, 18, 36, 0.28);
|
||||
border: 1px solid rgba(225, 242, 255, 0.2);
|
||||
box-shadow:
|
||||
inset 0 1px 0 rgba(255, 255, 255, 0.14),
|
||||
inset 0 -1px 0 rgba(255, 255, 255, 0.04),
|
||||
0 18px 40px rgba(0, 0, 0, 0.24),
|
||||
0 0 32px rgba(120, 200, 255, 0.12);
|
||||
backdrop-filter: blur(20px) saturate(145%);
|
||||
-webkit-backdrop-filter: blur(20px) saturate(145%);
|
||||
}
|
||||
|
||||
#info-panel::before,
|
||||
#coordinates-display::before,
|
||||
#legend::before,
|
||||
#earth-stats::before,
|
||||
#satellite-info::before {
|
||||
content: '';
|
||||
position: absolute;
|
||||
inset: 1px 1px 24% 1px;
|
||||
border-radius: inherit;
|
||||
background:
|
||||
linear-gradient(180deg, rgba(255, 255, 255, 0.18), rgba(255, 255, 255, 0.05) 26%, transparent 70%);
|
||||
opacity: 0.46;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
#info-panel::after,
|
||||
#coordinates-display::after,
|
||||
#legend::after,
|
||||
#earth-stats::after,
|
||||
#satellite-info::after {
|
||||
content: '';
|
||||
position: absolute;
|
||||
inset: -1px;
|
||||
padding: 1.4px;
|
||||
border-radius: inherit;
|
||||
background:
|
||||
linear-gradient(135deg, rgba(255, 255, 255, 0.3), rgba(170, 223, 255, 0.2) 34%, rgba(88, 169, 255, 0.14) 68%, rgba(255, 255, 255, 0.24));
|
||||
opacity: 0.78;
|
||||
pointer-events: none;
|
||||
filter: url(#liquid-glass-distortion) blur(0.35px);
|
||||
-webkit-mask:
|
||||
linear-gradient(#000 0 0) content-box,
|
||||
linear-gradient(#000 0 0);
|
||||
-webkit-mask-composite: xor;
|
||||
mask:
|
||||
linear-gradient(#000 0 0) content-box,
|
||||
linear-gradient(#000 0 0);
|
||||
mask-composite: exclude;
|
||||
}
|
||||
|
||||
#info-panel > *,
|
||||
#coordinates-display > *,
|
||||
#legend > *,
|
||||
#earth-stats > *,
|
||||
#satellite-info > * {
|
||||
position: relative;
|
||||
z-index: 1;
|
||||
}
|
||||
|
||||
#loading {
|
||||
position: absolute;
|
||||
top: 50%;
|
||||
@@ -106,16 +220,28 @@ input[type="range"]::-webkit-slider-thumb {
|
||||
.status-message {
|
||||
position: absolute;
|
||||
top: 20px;
|
||||
right: 20px;
|
||||
left: 50%;
|
||||
transform: translate(-50%, -18px);
|
||||
background-color: rgba(10, 10, 30, 0.85);
|
||||
border-radius: 10px;
|
||||
padding: 10px 15px;
|
||||
z-index: 10;
|
||||
z-index: 210;
|
||||
box-shadow: 0 0 20px rgba(0, 150, 255, 0.3);
|
||||
border: 1px solid rgba(0, 150, 255, 0.2);
|
||||
font-size: 0.9rem;
|
||||
display: none;
|
||||
backdrop-filter: blur(5px);
|
||||
text-align: center;
|
||||
min-width: 180px;
|
||||
opacity: 0;
|
||||
transition:
|
||||
transform 0.28s ease,
|
||||
opacity 0.28s ease;
|
||||
}
|
||||
|
||||
.status-message.visible {
|
||||
transform: translate(-50%, 0);
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
.status-message.success {
|
||||
@@ -147,3 +273,513 @@ input[type="range"]::-webkit-slider-thumb {
|
||||
display: none;
|
||||
user-select: none;
|
||||
}
|
||||
|
||||
/* Floating toolbar dock */
|
||||
#control-toolbar {
|
||||
position: relative;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: 0;
|
||||
background: transparent;
|
||||
border: none;
|
||||
box-shadow: none;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.toolbar-items {
|
||||
display: flex;
|
||||
gap: 10px;
|
||||
align-items: center;
|
||||
flex-wrap: nowrap;
|
||||
}
|
||||
|
||||
.floating-popover-group {
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.floating-popover-group::before {
|
||||
content: '';
|
||||
position: absolute;
|
||||
left: 50%;
|
||||
bottom: 100%;
|
||||
transform: translateX(-50%);
|
||||
width: 56px;
|
||||
height: 16px;
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
.floating-popover-group > .stack-toolbar {
|
||||
position: absolute;
|
||||
left: 50%;
|
||||
top: auto;
|
||||
right: auto;
|
||||
bottom: calc(100% + 12px);
|
||||
transform: translate(-50%, 10px);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
opacity: 0;
|
||||
visibility: hidden;
|
||||
pointer-events: none;
|
||||
transition:
|
||||
opacity 0.22s ease,
|
||||
transform 0.22s ease,
|
||||
visibility 0.22s ease;
|
||||
z-index: 220;
|
||||
}
|
||||
|
||||
.toolbar-btn.floating-btn {
|
||||
width: 42px;
|
||||
height: 42px;
|
||||
min-width: 42px;
|
||||
min-height: 42px;
|
||||
border-radius: 50%;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.liquid-glass-surface {
|
||||
--elastic-x: 0px;
|
||||
--elastic-y: 0px;
|
||||
--tilt-x: 0deg;
|
||||
--tilt-y: 0deg;
|
||||
--btn-scale: 1;
|
||||
--press-offset: 0px;
|
||||
--float-offset: 0px;
|
||||
--glow-opacity: 0.24;
|
||||
--glow-x: 50%;
|
||||
--glow-y: 22%;
|
||||
position: relative;
|
||||
isolation: isolate;
|
||||
transform-style: preserve-3d;
|
||||
overflow: hidden;
|
||||
background:
|
||||
radial-gradient(circle at var(--glow-x) var(--glow-y), rgba(255, 255, 255, 0.16), transparent 34%),
|
||||
radial-gradient(circle at 50% 118%, rgba(255, 255, 255, 0.08), transparent 30%),
|
||||
linear-gradient(180deg, var(--glass-fill-top), var(--glass-fill-bottom)),
|
||||
rgba(8, 20, 38, 0.22);
|
||||
border: 1px solid var(--hud-border);
|
||||
box-shadow:
|
||||
inset 0 1px 0 rgba(255, 255, 255, 0.14),
|
||||
inset 0 -1px 0 rgba(255, 255, 255, 0.05),
|
||||
var(--glass-shadow),
|
||||
var(--glass-glow);
|
||||
backdrop-filter: blur(18px) saturate(145%);
|
||||
-webkit-backdrop-filter: blur(18px) saturate(145%);
|
||||
transform:
|
||||
translate3d(var(--elastic-x), calc(var(--float-offset) + var(--press-offset) + var(--elastic-y)), 0)
|
||||
scale(var(--btn-scale));
|
||||
transition:
|
||||
transform 0.22s ease,
|
||||
box-shadow 0.22s ease,
|
||||
background 0.22s ease,
|
||||
opacity 0.18s ease,
|
||||
border-color 0.22s ease;
|
||||
animation: floatDock 3.8s ease-in-out infinite;
|
||||
}
|
||||
|
||||
.liquid-glass-surface::before {
|
||||
content: '';
|
||||
position: absolute;
|
||||
inset: 1px 1px 18px 1px;
|
||||
border-radius: inherit;
|
||||
background:
|
||||
linear-gradient(180deg, rgba(255, 255, 255, 0.18), rgba(255, 255, 255, 0.05) 28%, transparent 68%);
|
||||
opacity: 0.5;
|
||||
pointer-events: none;
|
||||
transform:
|
||||
perspective(120px)
|
||||
rotateX(calc(var(--tilt-x) * 0.7))
|
||||
rotateY(calc(var(--tilt-y) * 0.7))
|
||||
translate3d(calc(var(--elastic-x) * 0.22), calc(var(--elastic-y) * 0.22), 0);
|
||||
transition: opacity 0.18s ease, transform 0.18s ease;
|
||||
}
|
||||
|
||||
.liquid-glass-surface::after {
|
||||
content: '';
|
||||
position: absolute;
|
||||
inset: -1px;
|
||||
padding: 1.35px;
|
||||
border-radius: inherit;
|
||||
background:
|
||||
linear-gradient(135deg, rgba(255, 255, 255, 0.36), rgba(168, 222, 255, 0.22) 34%, rgba(96, 175, 255, 0.16) 66%, rgba(255, 255, 255, 0.28));
|
||||
opacity: 0.82;
|
||||
pointer-events: none;
|
||||
filter: url(#liquid-glass-distortion) blur(0.35px);
|
||||
transform:
|
||||
perspective(120px)
|
||||
rotateX(calc(var(--tilt-x) * 0.5))
|
||||
rotateY(calc(var(--tilt-y) * 0.5))
|
||||
translate3d(calc(var(--elastic-x) * 0.16), calc(var(--elastic-y) * 0.16), 0);
|
||||
-webkit-mask:
|
||||
linear-gradient(#000 0 0) content-box,
|
||||
linear-gradient(#000 0 0);
|
||||
-webkit-mask-composite: xor;
|
||||
mask:
|
||||
linear-gradient(#000 0 0) content-box,
|
||||
linear-gradient(#000 0 0);
|
||||
mask-composite: exclude;
|
||||
transition: opacity 0.18s ease, transform 0.18s ease;
|
||||
}
|
||||
|
||||
.toolbar-items > :nth-child(2n).floating-btn,
|
||||
.toolbar-items > :nth-child(2n) .floating-btn {
|
||||
animation-delay: 0.18s;
|
||||
}
|
||||
|
||||
.toolbar-items > :nth-child(3n).floating-btn,
|
||||
.toolbar-items > :nth-child(3n) .floating-btn {
|
||||
animation-delay: 0.34s;
|
||||
}
|
||||
|
||||
@keyframes floatDock {
|
||||
0%, 100% {
|
||||
--float-offset: 0px;
|
||||
}
|
||||
50% {
|
||||
--float-offset: -4px;
|
||||
}
|
||||
}
|
||||
|
||||
.toolbar-btn {
|
||||
position: relative;
|
||||
width: 28px;
|
||||
height: 28px;
|
||||
border: none;
|
||||
border-radius: 0;
|
||||
background: transparent;
|
||||
color: #4db8ff;
|
||||
font-size: 14px;
|
||||
cursor: pointer;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
box-sizing: border-box;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
overflow: visible;
|
||||
appearance: none;
|
||||
-webkit-appearance: none;
|
||||
}
|
||||
|
||||
.toolbar-btn:not(.liquid-glass-surface)::after {
|
||||
content: none;
|
||||
}
|
||||
|
||||
.toolbar-btn .icon {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
position: relative;
|
||||
z-index: 1;
|
||||
transform: translateZ(0);
|
||||
transition: transform 0.16s ease, opacity 0.16s ease;
|
||||
backface-visibility: hidden;
|
||||
-webkit-backface-visibility: hidden;
|
||||
line-height: 1;
|
||||
}
|
||||
|
||||
.liquid-glass-surface:hover {
|
||||
--btn-scale: 1.035;
|
||||
--press-offset: -1px;
|
||||
--glow-opacity: 0.32;
|
||||
background:
|
||||
radial-gradient(circle at var(--glow-x) var(--glow-y), rgba(255, 255, 255, 0.18), transparent 34%),
|
||||
radial-gradient(circle at 50% 118%, rgba(255, 255, 255, 0.1), transparent 30%),
|
||||
linear-gradient(180deg, rgba(255, 255, 255, 0.18), rgba(128, 198, 255, 0.1)),
|
||||
rgba(8, 20, 38, 0.2);
|
||||
border-color: var(--hud-border-hover);
|
||||
box-shadow:
|
||||
inset 0 1px 0 rgba(255, 255, 255, 0.2),
|
||||
inset 0 -1px 0 rgba(255, 255, 255, 0.08),
|
||||
0 18px 36px rgba(0, 0, 0, 0.24),
|
||||
0 0 28px rgba(145, 214, 255, 0.22);
|
||||
}
|
||||
|
||||
.liquid-glass-surface:hover::before {
|
||||
opacity: 0.62;
|
||||
transform: scale(1.01);
|
||||
}
|
||||
|
||||
.liquid-glass-surface:hover::after {
|
||||
opacity: 0.96;
|
||||
transform: scale(1.01);
|
||||
}
|
||||
|
||||
.liquid-glass-surface:active,
|
||||
.liquid-glass-surface.is-pressed {
|
||||
--btn-scale: 0.942;
|
||||
--press-offset: 2px;
|
||||
--glow-opacity: 0.2;
|
||||
background:
|
||||
radial-gradient(circle at var(--glow-x) var(--glow-y), rgba(255, 255, 255, 0.24), transparent 34%),
|
||||
radial-gradient(circle at 50% 118%, rgba(255, 255, 255, 0.14), transparent 30%),
|
||||
linear-gradient(180deg, rgba(255, 255, 255, 0.24), rgba(146, 210, 255, 0.16)),
|
||||
rgba(10, 24, 44, 0.24);
|
||||
border-color: rgba(240, 249, 255, 0.58);
|
||||
box-shadow:
|
||||
inset 0 2px 10px rgba(0, 0, 0, 0.2),
|
||||
inset 0 1px 0 rgba(255, 255, 255, 0.16),
|
||||
0 4px 10px rgba(0, 0, 0, 0.18),
|
||||
0 0 14px rgba(176, 226, 255, 0.18);
|
||||
}
|
||||
|
||||
.liquid-glass-surface:active::before,
|
||||
.liquid-glass-surface.is-pressed::before {
|
||||
opacity: 0.46;
|
||||
transform: translateY(2px) scale(0.985);
|
||||
}
|
||||
|
||||
.liquid-glass-surface:active::after,
|
||||
.liquid-glass-surface.is-pressed::after {
|
||||
opacity: 0.78;
|
||||
transform: scale(0.985);
|
||||
}
|
||||
|
||||
.liquid-glass-surface:active .icon,
|
||||
.liquid-glass-surface.is-pressed .icon {
|
||||
transform: translateY(1.5px);
|
||||
}
|
||||
|
||||
.liquid-glass-surface:active img,
|
||||
.liquid-glass-surface.is-pressed img,
|
||||
.liquid-glass-surface:active .material-symbols-rounded,
|
||||
.liquid-glass-surface.is-pressed .material-symbols-rounded {
|
||||
transform: translateY(1.5px);
|
||||
transition: transform 0.16s ease, opacity 0.16s ease;
|
||||
}
|
||||
|
||||
#zoom-control-group #zoom-toolbar .zoom-btn:active,
|
||||
#zoom-control-group #zoom-toolbar .zoom-btn.is-pressed,
|
||||
#zoom-control-group #zoom-toolbar .zoom-percent:active,
|
||||
#zoom-control-group #zoom-toolbar .zoom-percent.is-pressed {
|
||||
letter-spacing: -0.01em;
|
||||
}
|
||||
|
||||
.liquid-glass-surface.active {
|
||||
background:
|
||||
radial-gradient(circle at var(--glow-x) var(--glow-y), rgba(255, 255, 255, 0.18), transparent 34%),
|
||||
linear-gradient(180deg, rgba(255, 255, 255, 0.2), rgba(118, 200, 255, 0.14)),
|
||||
rgba(11, 34, 58, 0.26);
|
||||
border-color: var(--hud-border-active);
|
||||
box-shadow:
|
||||
inset 0 1px 0 rgba(255, 255, 255, 0.22),
|
||||
inset 0 0 18px rgba(160, 220, 255, 0.14),
|
||||
0 18px 34px rgba(0, 0, 0, 0.24),
|
||||
0 0 30px rgba(145, 214, 255, 0.24);
|
||||
}
|
||||
|
||||
.toolbar-btn svg {
|
||||
width: 20px;
|
||||
height: 20px;
|
||||
stroke: currentColor;
|
||||
stroke-width: 2.1;
|
||||
fill: none;
|
||||
stroke-linecap: round;
|
||||
stroke-linejoin: round;
|
||||
}
|
||||
|
||||
.toolbar-btn .material-symbols-rounded {
|
||||
font-size: 21px;
|
||||
line-height: 1;
|
||||
font-variation-settings:
|
||||
'FILL' 0,
|
||||
'wght' 500,
|
||||
'GRAD' 0,
|
||||
'opsz' 24;
|
||||
color: currentColor;
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
user-select: none;
|
||||
pointer-events: none;
|
||||
text-rendering: geometricPrecision;
|
||||
-webkit-font-smoothing: antialiased;
|
||||
-moz-osx-font-smoothing: grayscale;
|
||||
}
|
||||
|
||||
.toolbar-btn img {
|
||||
width: 20px;
|
||||
height: 20px;
|
||||
display: block;
|
||||
user-select: none;
|
||||
pointer-events: none;
|
||||
shape-rendering: geometricPrecision;
|
||||
image-rendering: -webkit-optimize-contrast;
|
||||
backface-visibility: hidden;
|
||||
-webkit-backface-visibility: hidden;
|
||||
}
|
||||
|
||||
#rotate-toggle .icon-play,
|
||||
#rotate-toggle.is-stopped .icon-pause,
|
||||
#layout-toggle .layout-collapse,
|
||||
#layout-toggle.active .layout-expand {
|
||||
display: none;
|
||||
}
|
||||
|
||||
#rotate-toggle.is-stopped .icon-play,
|
||||
#layout-toggle.active .layout-collapse {
|
||||
display: inline-flex;
|
||||
}
|
||||
|
||||
#zoom-control-group:hover #zoom-toolbar,
|
||||
#zoom-control-group:focus-within #zoom-toolbar,
|
||||
#zoom-control-group.open #zoom-toolbar,
|
||||
#info-control-group:hover #info-toolbar,
|
||||
#info-control-group:focus-within #info-toolbar,
|
||||
#info-control-group.open #info-toolbar {
|
||||
opacity: 1;
|
||||
visibility: visible;
|
||||
pointer-events: auto;
|
||||
transform: translate(-50%, 0);
|
||||
}
|
||||
|
||||
#zoom-control-group #zoom-toolbar .zoom-percent {
|
||||
min-width: 0;
|
||||
width: 42px;
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
height: 42px;
|
||||
padding: 0;
|
||||
font-size: 0.68rem;
|
||||
border-radius: 50%;
|
||||
color: #4db8ff;
|
||||
animation: floatDock 3.8s ease-in-out infinite;
|
||||
animation-delay: 0.18s;
|
||||
}
|
||||
|
||||
#zoom-control-group #zoom-toolbar .zoom-percent:hover {
|
||||
}
|
||||
|
||||
#zoom-control-group #zoom-toolbar,
|
||||
#info-control-group #info-toolbar {
|
||||
top: auto;
|
||||
right: auto;
|
||||
left: 50%;
|
||||
bottom: calc(100% + 12px);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
justify-content: flex-start;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
#info-toolbar .toolbar-btn:nth-child(1) {
|
||||
animation-delay: 0.34s;
|
||||
}
|
||||
|
||||
#info-toolbar .toolbar-btn:nth-child(2) {
|
||||
animation-delay: 0.18s;
|
||||
}
|
||||
|
||||
#info-toolbar .toolbar-btn:nth-child(3) {
|
||||
animation-delay: 0.1s;
|
||||
}
|
||||
|
||||
#info-toolbar .toolbar-btn:nth-child(4) {
|
||||
animation-delay: 0s;
|
||||
}
|
||||
|
||||
#zoom-control-group #zoom-toolbar .zoom-btn {
|
||||
width: 42px;
|
||||
height: 42px;
|
||||
min-width: 42px;
|
||||
border-radius: 50%;
|
||||
color: #4db8ff;
|
||||
animation: floatDock 3.8s ease-in-out infinite;
|
||||
}
|
||||
|
||||
#zoom-toolbar .zoom-btn:nth-child(1) {
|
||||
animation-delay: 0s;
|
||||
}
|
||||
|
||||
#zoom-toolbar .zoom-btn:nth-child(3) {
|
||||
animation-delay: 0.34s;
|
||||
}
|
||||
|
||||
#zoom-control-group #zoom-toolbar .zoom-btn:hover {
|
||||
}
|
||||
|
||||
#zoom-control-group #zoom-toolbar .zoom-btn:active,
|
||||
#zoom-control-group #zoom-toolbar .zoom-percent:active {
|
||||
}
|
||||
|
||||
#zoom-control-group #zoom-toolbar .tooltip {
|
||||
bottom: calc(100% + 10px);
|
||||
}
|
||||
|
||||
#zoom-control-group #zoom-toolbar .tooltip::after {
|
||||
top: 100%;
|
||||
left: 50%;
|
||||
transform: translateX(-50%);
|
||||
border: 6px solid transparent;
|
||||
border-top-color: rgba(77, 184, 255, 0.4);
|
||||
}
|
||||
|
||||
#container.layout-expanded #info-panel {
|
||||
top: 20px;
|
||||
left: 20px;
|
||||
transform: translate(calc(-100% + 20px), calc(-100% + 20px));
|
||||
}
|
||||
|
||||
#container.layout-expanded #coordinates-display {
|
||||
top: 20px;
|
||||
right: 20px;
|
||||
transform: translate(calc(100% - 20px), calc(-100% + 20px));
|
||||
}
|
||||
|
||||
#container.layout-expanded #legend {
|
||||
left: 20px;
|
||||
bottom: 20px;
|
||||
transform: translate(calc(-100% + 20px), calc(100% - 20px));
|
||||
}
|
||||
|
||||
#container.layout-expanded #earth-stats {
|
||||
right: 20px;
|
||||
bottom: 20px;
|
||||
transform: translate(calc(100% - 20px), calc(100% - 20px));
|
||||
}
|
||||
|
||||
#container.layout-expanded #right-toolbar-group {
|
||||
bottom: 18px;
|
||||
transform: translateX(-50%);
|
||||
}
|
||||
|
||||
.toolbar-btn .tooltip {
|
||||
position: absolute;
|
||||
bottom: 56px;
|
||||
left: 50%;
|
||||
transform: translateX(-50%);
|
||||
background: rgba(10, 10, 30, 0.95);
|
||||
color: #fff;
|
||||
padding: 6px 12px;
|
||||
border-radius: 6px;
|
||||
font-size: 12px;
|
||||
white-space: nowrap;
|
||||
opacity: 0;
|
||||
visibility: hidden;
|
||||
transition: all 0.2s ease;
|
||||
border: 1px solid rgba(77, 184, 255, 0.4);
|
||||
pointer-events: none;
|
||||
z-index: 100;
|
||||
}
|
||||
|
||||
.toolbar-btn:hover .tooltip,
|
||||
.floating-popover-group:hover > .toolbar-btn .tooltip,
|
||||
.floating-popover-group:focus-within > .toolbar-btn .tooltip {
|
||||
opacity: 1;
|
||||
visibility: visible;
|
||||
bottom: 58px;
|
||||
}
|
||||
|
||||
.toolbar-btn .tooltip::after {
|
||||
content: '';
|
||||
position: absolute;
|
||||
top: 100%;
|
||||
left: 50%;
|
||||
transform: translateX(-50%);
|
||||
border: 6px solid transparent;
|
||||
border-top-color: rgba(77, 184, 255, 0.4);
|
||||
}
|
||||
|
||||
@@ -1,18 +1,13 @@
|
||||
/* coordinates-display */
|
||||
|
||||
#coordinates-display {
|
||||
position: absolute;
|
||||
top: 20px;
|
||||
right: 250px;
|
||||
background-color: rgba(10, 10, 30, 0.85);
|
||||
border-radius: 10px;
|
||||
right: 20px;
|
||||
border-radius: 18px;
|
||||
padding: 10px 15px;
|
||||
z-index: 10;
|
||||
box-shadow: 0 0 20px rgba(0, 150, 255, 0.3);
|
||||
border: 1px solid rgba(0, 150, 255, 0.2);
|
||||
font-size: 0.9rem;
|
||||
min-width: 180px;
|
||||
backdrop-filter: blur(5px);
|
||||
}
|
||||
|
||||
#coordinates-display .coord-item {
|
||||
|
||||
@@ -1,18 +1,13 @@
|
||||
/* earth-stats */
|
||||
|
||||
#earth-stats {
|
||||
position: absolute;
|
||||
bottom: 20px;
|
||||
right: 20px;
|
||||
background-color: rgba(10, 10, 30, 0.85);
|
||||
border-radius: 10px;
|
||||
border-radius: 18px;
|
||||
padding: 15px;
|
||||
width: 250px;
|
||||
z-index: 10;
|
||||
box-shadow: 0 0 20px rgba(0, 150, 255, 0.3);
|
||||
border: 1px solid rgba(0, 150, 255, 0.2);
|
||||
font-size: 0.9rem;
|
||||
backdrop-filter: blur(5px);
|
||||
}
|
||||
|
||||
#earth-stats .stats-item {
|
||||
@@ -29,3 +24,28 @@
|
||||
color: #4db8ff;
|
||||
font-weight: 500;
|
||||
}
|
||||
|
||||
#satellite-info {
|
||||
bottom: 20px;
|
||||
right: 290px;
|
||||
border-radius: 18px;
|
||||
padding: 15px;
|
||||
width: 220px;
|
||||
z-index: 10;
|
||||
font-size: 0.85rem;
|
||||
}
|
||||
|
||||
#satellite-info .stats-item {
|
||||
margin-bottom: 6px;
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
}
|
||||
|
||||
#satellite-info .stats-label {
|
||||
color: #aaa;
|
||||
}
|
||||
|
||||
#satellite-info .stats-value {
|
||||
color: #00e5ff;
|
||||
font-weight: 500;
|
||||
}
|
||||
|
||||
@@ -1,17 +1,12 @@
|
||||
/* info-panel */
|
||||
|
||||
#info-panel {
|
||||
position: absolute;
|
||||
top: 20px;
|
||||
left: 20px;
|
||||
background-color: rgba(10, 10, 30, 0.85);
|
||||
border-radius: 10px;
|
||||
border-radius: 18px;
|
||||
padding: 20px;
|
||||
width: 320px;
|
||||
z-index: 10;
|
||||
box-shadow: 0 0 20px rgba(0, 150, 255, 0.3);
|
||||
border: 1px solid rgba(0, 150, 255, 0.2);
|
||||
backdrop-filter: blur(5px);
|
||||
}
|
||||
|
||||
#info-panel h1 {
|
||||
@@ -19,14 +14,34 @@
|
||||
margin-bottom: 5px;
|
||||
color: #4db8ff;
|
||||
text-shadow: 0 0 10px rgba(77, 184, 255, 0.5);
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
#info-panel .subtitle {
|
||||
color: #aaa;
|
||||
margin-bottom: 20px;
|
||||
font-size: 0.9rem;
|
||||
border-bottom: 1px solid rgba(255,255,255,0.1);
|
||||
padding-bottom: 10px;
|
||||
padding-bottom: 12px;
|
||||
text-align: center;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
}
|
||||
|
||||
#info-panel .subtitle-main {
|
||||
color: #d7e7f5;
|
||||
font-size: 0.95rem;
|
||||
line-height: 1.35;
|
||||
font-weight: 500;
|
||||
letter-spacing: 0.02em;
|
||||
}
|
||||
|
||||
#info-panel .subtitle-meta {
|
||||
color: #8ea5bc;
|
||||
font-size: 0.74rem;
|
||||
line-height: 1.3;
|
||||
letter-spacing: 0.08em;
|
||||
text-transform: uppercase;
|
||||
}
|
||||
|
||||
#info-panel .cable-info {
|
||||
@@ -95,11 +110,184 @@
|
||||
|
||||
#info-panel .zoom-buttons {
|
||||
display: flex;
|
||||
gap: 10px;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: 15px;
|
||||
margin-top: 10px;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
#info-panel .zoom-percent-container {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: 15px;
|
||||
}
|
||||
|
||||
#info-panel .zoom-percent {
|
||||
font-size: 1.4rem;
|
||||
font-weight: 600;
|
||||
color: #4db8ff;
|
||||
min-width: 70px;
|
||||
text-align: center;
|
||||
cursor: pointer;
|
||||
padding: 5px 10px;
|
||||
border-radius: 5px;
|
||||
transition: all 0.2s ease;
|
||||
}
|
||||
|
||||
#info-panel .zoom-percent:hover {
|
||||
background: rgba(77, 184, 255, 0.2);
|
||||
box-shadow: 0 0 10px rgba(77, 184, 255, 0.3);
|
||||
}
|
||||
|
||||
#info-panel .zoom-buttons .zoom-btn {
|
||||
width: 36px;
|
||||
height: 36px;
|
||||
min-width: 36px;
|
||||
border: none;
|
||||
border-radius: 50%;
|
||||
background: rgba(77, 184, 255, 0.2);
|
||||
color: #4db8ff;
|
||||
font-size: 22px;
|
||||
font-weight: bold;
|
||||
cursor: pointer;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
transition: all 0.2s ease;
|
||||
padding: 0;
|
||||
flex: 0 0 auto;
|
||||
}
|
||||
|
||||
#info-panel .zoom-buttons .zoom-btn:hover {
|
||||
background: rgba(77, 184, 255, 0.4);
|
||||
transform: scale(1.1);
|
||||
box-shadow: 0 0 10px rgba(77, 184, 255, 0.5);
|
||||
}
|
||||
|
||||
#info-panel .zoom-buttons button {
|
||||
flex: 1;
|
||||
min-width: 60px;
|
||||
}
|
||||
|
||||
/* Info Card - Unified details panel (inside info-panel) */
|
||||
.info-card {
|
||||
margin-top: 15px;
|
||||
background:
|
||||
linear-gradient(180deg, rgba(255, 255, 255, 0.08), rgba(110, 176, 255, 0.04)),
|
||||
rgba(7, 18, 36, 0.2);
|
||||
border-radius: 14px;
|
||||
border: 1px solid rgba(225, 242, 255, 0.12);
|
||||
box-shadow:
|
||||
inset 0 1px 0 rgba(255, 255, 255, 0.08),
|
||||
0 10px 24px rgba(0, 0, 0, 0.16);
|
||||
padding: 0;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.info-card.no-border {
|
||||
background: transparent;
|
||||
border: none;
|
||||
}
|
||||
|
||||
.info-card-header {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
padding: 10px 12px;
|
||||
background: linear-gradient(180deg, rgba(255, 255, 255, 0.09), rgba(77, 184, 255, 0.06));
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.info-card-icon {
|
||||
font-size: 18px;
|
||||
}
|
||||
|
||||
.info-card-header h3 {
|
||||
flex: 1;
|
||||
margin: 0;
|
||||
font-size: 1rem;
|
||||
color: #4db8ff;
|
||||
}
|
||||
|
||||
#info-card-content {
|
||||
padding: 10px 12px;
|
||||
max-height: 40vh;
|
||||
overflow-y: auto;
|
||||
scrollbar-width: thin;
|
||||
scrollbar-color: rgba(160, 220, 255, 0.45) transparent;
|
||||
}
|
||||
|
||||
#info-card-content::-webkit-scrollbar {
|
||||
width: 6px;
|
||||
}
|
||||
|
||||
#info-card-content::-webkit-scrollbar-track {
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
#info-card-content::-webkit-scrollbar-thumb {
|
||||
background: linear-gradient(180deg, rgba(210, 237, 255, 0.32), rgba(110, 176, 255, 0.34));
|
||||
border-radius: 999px;
|
||||
}
|
||||
|
||||
#info-card-content::-webkit-scrollbar-thumb:hover {
|
||||
background: linear-gradient(180deg, rgba(232, 246, 255, 0.42), rgba(128, 198, 255, 0.46));
|
||||
}
|
||||
|
||||
.info-card-property {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
padding: 6px;
|
||||
border-bottom: 1px solid rgba(255, 255, 255, 0.05);
|
||||
}
|
||||
|
||||
.info-card-property:last-child {
|
||||
border-bottom: none;
|
||||
}
|
||||
|
||||
.info-card-label {
|
||||
color: #aaa;
|
||||
font-size: 0.85rem;
|
||||
cursor: pointer;
|
||||
transition: color 0.18s ease;
|
||||
}
|
||||
|
||||
.info-card-label:hover {
|
||||
color: #d9f1ff;
|
||||
}
|
||||
|
||||
.info-card-value {
|
||||
color: #4db8ff;
|
||||
font-weight: 500;
|
||||
font-size: 0.9rem;
|
||||
text-align: right;
|
||||
max-width: 180px;
|
||||
word-break: break-word;
|
||||
}
|
||||
|
||||
/* Cable type */
|
||||
.info-card.cable {
|
||||
border-color: rgba(255, 200, 0, 0.4);
|
||||
}
|
||||
|
||||
.info-card.cable .info-card-header {
|
||||
background: rgba(255, 200, 0, 0.15);
|
||||
}
|
||||
|
||||
.info-card.cable .info-card-header h3 {
|
||||
color: #ffc800;
|
||||
}
|
||||
|
||||
/* Satellite type */
|
||||
.info-card.satellite {
|
||||
border-color: rgba(0, 229, 255, 0.4);
|
||||
}
|
||||
|
||||
.info-card.satellite .info-card-header {
|
||||
background: rgba(0, 229, 255, 0.15);
|
||||
}
|
||||
|
||||
.info-card.satellite .info-card-header h3 {
|
||||
color: #00e5ff;
|
||||
}
|
||||
|
||||
@@ -1,28 +1,59 @@
|
||||
/* legend */
|
||||
|
||||
#legend {
|
||||
position: absolute;
|
||||
bottom: 20px;
|
||||
left: 20px;
|
||||
background-color: rgba(10, 10, 30, 0.85);
|
||||
border-radius: 10px;
|
||||
border-radius: 18px;
|
||||
padding: 15px;
|
||||
width: 220px;
|
||||
z-index: 10;
|
||||
box-shadow: 0 0 20px rgba(0, 150, 255, 0.3);
|
||||
border: 1px solid rgba(0, 150, 255, 0.2);
|
||||
backdrop-filter: blur(5px);
|
||||
}
|
||||
|
||||
#legend .legend-title {
|
||||
color: #4db8ff;
|
||||
margin-bottom: 10px;
|
||||
font-size: 1.1rem;
|
||||
}
|
||||
|
||||
#legend .legend-list {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
max-height: 202px;
|
||||
overflow-y: auto;
|
||||
padding-right: 4px;
|
||||
scrollbar-width: thin;
|
||||
scrollbar-color: rgba(160, 220, 255, 0.4) transparent;
|
||||
}
|
||||
|
||||
#legend .legend-list::-webkit-scrollbar {
|
||||
width: 6px;
|
||||
}
|
||||
|
||||
#legend .legend-list::-webkit-scrollbar-track {
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
#legend .legend-list::-webkit-scrollbar-thumb {
|
||||
background: linear-gradient(180deg, rgba(210, 237, 255, 0.28), rgba(110, 176, 255, 0.34));
|
||||
border-radius: 999px;
|
||||
}
|
||||
|
||||
#legend .legend-item {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
margin-bottom: 8px;
|
||||
padding: 6px 8px;
|
||||
border-radius: 10px;
|
||||
background: linear-gradient(180deg, rgba(255, 255, 255, 0.05), rgba(120, 180, 255, 0.02));
|
||||
border: 1px solid rgba(225, 242, 255, 0.06);
|
||||
}
|
||||
|
||||
#legend .legend-color {
|
||||
width: 20px;
|
||||
height: 20px;
|
||||
border-radius: 3px;
|
||||
border-radius: 6px;
|
||||
margin-right: 10px;
|
||||
box-shadow:
|
||||
inset 0 1px 0 rgba(255, 255, 255, 0.2),
|
||||
0 0 12px rgba(77, 184, 255, 0.18);
|
||||
}
|
||||
|
||||