Initial commit

This commit is contained in:
wanghep
2026-03-20 21:47:30 +08:00
commit 2eab960303
83 changed files with 51694 additions and 0 deletions

View File

@ -0,0 +1 @@

View File

@ -0,0 +1,22 @@
import json
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import Any
class JsonRepository:
def __init__(self, path: Path) -> None:
self.path = path
def read(self, default: dict[str, Any] | None = None) -> dict[str, Any]:
if not self.path.exists():
return default or {}
with self.path.open("r", encoding="utf-8") as handle:
return json.load(handle)
def write(self, payload: dict[str, Any]) -> None:
self.path.parent.mkdir(parents=True, exist_ok=True)
with NamedTemporaryFile("w", delete=False, dir=self.path.parent, encoding="utf-8") as temp:
json.dump(payload, temp, ensure_ascii=False, indent=2)
temp_path = Path(temp.name)
temp_path.replace(self.path)

View File

@ -0,0 +1,196 @@
from pathlib import Path
from typing import Any
from app.core.config import (
DAILY_STATS_DIR,
MINUTE_SNAPSHOTS_DIR,
PUSH_RECORDS_DIR,
RAW_PAYLOADS_DIR,
SOURCE_DIAGNOSTICS_FILE,
SYSTEM_CONFIG_FILE,
)
from app.repositories.json_repository import JsonRepository
from app.repositories.mysql_repository import MySQLRepository
class MonitoringRepository:
    """Persistence facade: JSON files as primary store, MySQL as optional mirror.

    Writes always land in the JSON files (except alert state, which is
    MySQL-only — see get_alert_state); when the bootstrap config selects
    MySQL, reads prefer the MySQL copy, fall back to the JSON files, and
    re-seed MySQL from them.
    """

    def __init__(self) -> None:
        # File-backed repositories are always available.
        self.system_config_repo = JsonRepository(SYSTEM_CONFIG_FILE)
        self.source_diagnostics_repo = JsonRepository(SOURCE_DIAGNOSTICS_FILE)
        self.history_repo = JsonRepository(DAILY_STATS_DIR / "summary.json")
        self.push_records_repo = JsonRepository(PUSH_RECORDS_DIR / "records.json")
        # Lazily constructed MySQL mirror; stays None until first use.
        self._mysql_repository: MySQLRepository | None = None

    def _get_bootstrap_config(self) -> dict[str, Any]:
        # The JSON system-config file decides whether MySQL is used at all,
        # so it is always read from disk, never from MySQL.
        return self.system_config_repo.read({})

    def _should_use_mysql(self) -> bool:
        """Return True when the config opts into MySQL and names host/db/user.

        A password is deliberately not required here.
        """
        config = self._get_bootstrap_config()
        return bool(
            config.get("storage_backend") == "mysql"
            and config.get("mysql_enabled")
            and config.get("mysql_host")
            and config.get("mysql_database")
            and config.get("mysql_username")
        )

    def _mysql(self) -> MySQLRepository | None:
        """Return the cached MySQL repository, or None when MySQL is disabled."""
        if not self._should_use_mysql():
            return None
        if self._mysql_repository is None:
            self._mysql_repository = MySQLRepository(self._get_bootstrap_config())
        return self._mysql_repository

    def get_system_config(self) -> dict:
        """Read the system config: MySQL first, seeding MySQL from JSON if empty."""
        mysql = self._mysql()
        if mysql is None:
            return self.system_config_repo.read()
        payload = mysql.read_document("system_config", "default", self.system_config_repo.read({}))
        if payload:
            return payload
        # MySQL had nothing: fall back to the JSON copy and push it to MySQL.
        fallback = self.system_config_repo.read({})
        if fallback:
            mysql.write_document("system_config", "default", fallback)
        return fallback

    def save_system_config(self, payload: dict) -> None:
        """Write the config to JSON, then mirror it to MySQL when enabled."""
        self.system_config_repo.write(payload)
        mysql = self._mysql()
        if mysql is not None:
            mysql.write_document("system_config", "default", payload)

    def get_source_diagnostics(self) -> dict:
        """Read source diagnostics: MySQL first, seeding MySQL from JSON if empty."""
        mysql = self._mysql()
        if mysql is None:
            return self.source_diagnostics_repo.read()
        payload = mysql.read_document("source_diagnostics", "default", self.source_diagnostics_repo.read({}))
        if payload:
            return payload
        fallback = self.source_diagnostics_repo.read({})
        if fallback:
            mysql.write_document("source_diagnostics", "default", fallback)
        return fallback

    def save_source_diagnostics(self, payload: dict) -> None:
        """Write diagnostics to JSON, then mirror them to MySQL when enabled."""
        self.source_diagnostics_repo.write(payload)
        mysql = self._mysql()
        if mysql is not None:
            mysql.write_document("source_diagnostics", "default", payload)

    def get_snapshot_by_trade_date(self, trade_date: str) -> dict:
        """Return the minute snapshot for *trade_date* (MySQL first, then file)."""
        mysql = self._mysql()
        if mysql is not None:
            payload = mysql.read_document("minute_snapshot", trade_date, {})
            if payload:
                return payload
        path = MINUTE_SNAPSHOTS_DIR / f"{trade_date}.json"
        return JsonRepository(path).read({})

    def get_latest_snapshot(self) -> dict:
        """Return the most recent minute snapshot, or {} when none exist.

        MySQL rows come back ordered by sort_value (the trade date)
        descending, so the first row is the latest. The file fallback picks
        the lexicographically last file name.
        # assumes trade_date file names sort chronologically (e.g. YYYY-MM-DD) — TODO confirm
        """
        mysql = self._mysql()
        if mysql is not None:
            rows = mysql.list_documents("minute_snapshot", limit=1)
            if rows:
                return rows[0]
        files = sorted(MINUTE_SNAPSHOTS_DIR.glob("*.json"))
        if not files:
            return {}
        return JsonRepository(files[-1]).read()

    def save_snapshot(self, trade_date: str, payload: dict) -> None:
        """Write the snapshot file for *trade_date*, then mirror to MySQL."""
        JsonRepository(MINUTE_SNAPSHOTS_DIR / f"{trade_date}.json").write(payload)
        mysql = self._mysql()
        if mysql is not None:
            mysql.write_document("minute_snapshot", trade_date, payload, sort_value=trade_date)

    def get_history(self) -> dict:
        """Read the history summary: MySQL first, seeding MySQL from JSON if empty."""
        mysql = self._mysql()
        if mysql is None:
            return self.history_repo.read()
        payload = mysql.read_document("history_summary", "default", self.history_repo.read({}))
        if payload:
            return payload
        fallback = self.history_repo.read({})
        if fallback:
            mysql.write_document("history_summary", "default", fallback)
        return fallback

    def save_history(self, payload: dict) -> None:
        """Write the history summary to JSON, then mirror it to MySQL."""
        self.history_repo.write(payload)
        mysql = self._mysql()
        if mysql is not None:
            mysql.write_document("history_summary", "default", payload)

    def get_push_records(self) -> dict:
        """Return {"records": [...]} of push records (MySQL first, then file)."""
        mysql = self._mysql()
        if mysql is not None:
            records = mysql.list_documents("push_record")
            if records:
                return {"records": records}
        return self.push_records_repo.read({"records": []})

    def save_push_records(self, payload: dict) -> None:
        """Persist the full record list to JSON and upsert each row in MySQL."""
        self.push_records_repo.write(payload)
        mysql = self._mysql()
        if mysql is not None:
            for record in payload.get("records", []):
                mysql.write_document(
                    "push_record",
                    record["id"],  # each record must carry a unique "id"
                    record,
                    sort_value=record.get("triggered_at"),
                )

    def append_push_record(self, record: dict) -> dict:
        """Prepend *record* (newest first) to the push records and persist all."""
        payload = self.get_push_records()
        records = payload.get("records", [])
        records.insert(0, record)
        payload["records"] = records
        self.save_push_records(payload)
        return record

    def get_alert_state(self, trade_date: str) -> dict:
        """Return the alert state for *trade_date*, or {}.

        NOTE(review): alert state is persisted in MySQL only — with the JSON
        backend it is effectively ephemeral (always {}). Confirm intended.
        """
        mysql = self._mysql()
        if mysql is not None:
            payload = mysql.read_document("alert_state", trade_date, {})
            if payload:
                return payload
        return {}

    def save_alert_state(self, trade_date: str, payload: dict) -> None:
        """Persist alert state to MySQL; a no-op when MySQL is disabled."""
        mysql = self._mysql()
        if mysql is not None:
            mysql.write_document("alert_state", trade_date, payload, sort_value=trade_date)

    def save_raw_payload(self, name: str, payload: dict) -> Path:
        """Write a raw upstream payload to file (and MySQL); return the file path."""
        path = RAW_PAYLOADS_DIR / f"{name}.json"
        JsonRepository(path).write(payload)
        mysql = self._mysql()
        if mysql is not None:
            mysql.write_document("raw_payload", name, payload, sort_value=name)
        return path

    def get_document(self, category: str, doc_key: str, default: dict | None = None) -> dict:
        """Generic MySQL document read; returns *default* (or {}) without MySQL."""
        mysql = self._mysql()
        if mysql is not None:
            payload = mysql.read_document(category, doc_key, default or {})
            if payload:
                return payload
        return default or {}

    def save_document(self, category: str, doc_key: str, payload: dict, *, sort_value: str | None = None) -> None:
        """Generic MySQL document write; a no-op when MySQL is disabled."""
        mysql = self._mysql()
        if mysql is not None:
            mysql.write_document(category, doc_key, payload, sort_value=sort_value)

    def list_documents(
        self,
        category: str,
        *,
        limit: int | None = None,
        descending: bool = True,
    ) -> list[dict]:
        """List MySQL documents in *category*; [] when MySQL is disabled."""
        mysql = self._mysql()
        if mysql is None:
            return []
        return mysql.list_documents(category, limit=limit, descending=descending)

View File

@ -0,0 +1,95 @@
import json
from datetime import datetime
from typing import Any
import pymysql
class MySQLRepository:
    """JSON-document store on top of a single MySQL table (``app_documents``).

    Documents are addressed by (category, doc_key) and stored as JSON text;
    the optional ``sort_value`` column supports ordered listings within a
    category. Every operation opens a fresh autocommitting connection.
    """

    def __init__(self, config: dict[str, Any]) -> None:
        """Keep the connection *config* and create the table if it is missing."""
        self.config = config
        self._ensure_schema()

    def _connect(self):
        """Open a new autocommitting DictCursor connection from the config."""
        return pymysql.connect(
            host=self.config["mysql_host"],
            port=int(self.config.get("mysql_port", 3306)),
            user=self.config["mysql_username"],
            # Tolerate configs without a password (the caller's enablement
            # check does not require one) instead of raising KeyError.
            password=self.config.get("mysql_password", ""),
            database=self.config["mysql_database"],
            charset=self.config.get("mysql_charset", "utf8mb4"),
            autocommit=True,
            cursorclass=pymysql.cursors.DictCursor,
        )

    def _ensure_schema(self) -> None:
        """Create the ``app_documents`` table on first use (idempotent)."""
        statements = [
            """
            CREATE TABLE IF NOT EXISTS app_documents (
                id BIGINT AUTO_INCREMENT PRIMARY KEY,
                category VARCHAR(64) NOT NULL,
                doc_key VARCHAR(128) NOT NULL,
                sort_value VARCHAR(64) DEFAULT NULL,
                payload LONGTEXT NOT NULL,
                created_at DATETIME NOT NULL,
                updated_at DATETIME NOT NULL,
                UNIQUE KEY uniq_category_key (category, doc_key),
                KEY idx_category_sort (category, sort_value)
            ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4
            """
        ]
        with self._connect() as connection:
            with connection.cursor() as cursor:
                for statement in statements:
                    cursor.execute(statement)

    def read_document(self, category: str, doc_key: str, default: dict[str, Any] | None = None) -> dict[str, Any]:
        """Return the parsed payload for (category, doc_key), or *default* (or {})."""
        sql = "SELECT payload FROM app_documents WHERE category=%s AND doc_key=%s LIMIT 1"
        with self._connect() as connection:
            with connection.cursor() as cursor:
                cursor.execute(sql, (category, doc_key))
                row = cursor.fetchone()
        if not row:
            return default or {}
        return json.loads(row["payload"])

    def write_document(
        self,
        category: str,
        doc_key: str,
        payload: dict[str, Any],
        *,
        sort_value: str | None = None,
    ) -> None:
        """Upsert the document at (category, doc_key).

        On update, ``created_at`` is preserved while ``sort_value``,
        ``payload`` and ``updated_at`` are overwritten.
        """
        now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        # NOTE: VALUES() inside ON DUPLICATE KEY UPDATE is deprecated in
        # MySQL 8.0.20+ (still accepted); revisit with a row alias if the
        # server version warrants it.
        sql = """
            INSERT INTO app_documents (category, doc_key, sort_value, payload, created_at, updated_at)
            VALUES (%s, %s, %s, %s, %s, %s)
            ON DUPLICATE KEY UPDATE
                sort_value=VALUES(sort_value),
                payload=VALUES(payload),
                updated_at=VALUES(updated_at)
        """
        serialized = json.dumps(payload, ensure_ascii=False)
        with self._connect() as connection:
            with connection.cursor() as cursor:
                cursor.execute(sql, (category, doc_key, sort_value, serialized, now, now))

    def list_documents(
        self,
        category: str,
        *,
        limit: int | None = None,
        descending: bool = True,
    ) -> list[dict[str, Any]]:
        """Return all payloads in *category*, ordered by sort_value then updated_at."""
        direction = "DESC" if descending else "ASC"
        sql = f"SELECT payload FROM app_documents WHERE category=%s ORDER BY sort_value {direction}, updated_at {direction}"
        params: list[Any] = [category]
        if limit is not None:
            sql += " LIMIT %s"
            params.append(limit)
        with self._connect() as connection:
            with connection.cursor() as cursor:
                cursor.execute(sql, params)
                rows = cursor.fetchall()
        return [json.loads(row["payload"]) for row in rows]