# zjjk/backend/app/services/eastmoney_sync_service.py
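"""Synchronize Eastmoney southbound (港股通) fund-flow data.

Pulls the realtime overview, intraday minute timeline, and daily history from
Eastmoney's public endpoints, normalizes amounts to 亿 HKD, evaluates alerts,
and persists snapshots, aggregates, and raw payloads via MonitoringRepository.
"""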
from __future__ import annotations

from collections import defaultdict
from datetime import datetime
from zoneinfo import ZoneInfo

from app.clients.eastmoney_client import EastmoneyClient
from app.core.config import HISTORY_START_DATE
from app.repositories.monitoring_repository import MonitoringRepository
from app.services.alert_engine import alert_engine
from app.services.market_clock import get_market_state
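
# Note: "secid" below follows Eastmoney's "market.code" convention
# (e.g. "100.HSI"); the numeric prefixes are assumed to be the market ids
# Eastmoney assigns to these Hong Kong indexes.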
SOUTHBOUND_BENCHMARKS = [
    {
        "key": "hsi",
        "label": "恒生指数",
        "secid": "100.HSI",
        "unit": "",
        "detail_url": "https://quote.eastmoney.com/gb/zsHSI.html",
    },
    {
        "key": "hstech",
        "label": "恒生科技指数",
        "secid": "124.HSTECH",
        "unit": "",
        "detail_url": "https://quote.eastmoney.com/gb/zsHSTECH.html",
    },
]

HISTORY_BENCHMARKS = [
    {
        "key": "hstech_daily",
        "label": "恒生科技指数",
        "secid": "124.HSTECH",
        "period": "daily",
    }
]


class EastmoneySyncService:
    def __init__(self) -> None:
        self.client = EastmoneyClient()
        self.repository = MonitoringRepository()
        self.tz = ZoneInfo("Asia/Shanghai")

    @staticmethod
    def _wan_to_yi(value: float | int | None) -> float | None:
        # Convert 万 (10^4) to 亿 (10^8) by dividing by 10,000.
        if value is None:
            return None
        return round(float(value) / 10000, 4)

    @staticmethod
    def _million_to_yi(value: float | int | None) -> float | None:
        # Convert millions (10^6) to 亿 (10^8) by dividing by 100.
        if value is None:
            return None
        return round(float(value) / 100, 4)

    @staticmethod
    def _safe_float(value: str | float | int | None) -> float | None:
        # Eastmoney uses "" and "-" as null markers in CSV-style fields.
        if value in (None, "", "-"):
            return None
        return float(value)
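
    # Worked example for the unit helpers above (illustrative values, not a
    # real payload). Judging by the conversion factors, the realtime feed
    # reports 万 HKD and the history feed millions of HKD; both are
    # normalized to 亿 (10^8) HKD, even though the output fields are named
    # "*_hkd_billion":
    #
    #     _wan_to_yi(123456)   -> 12.3456  # 123,456 万 = 12.3456 亿
    #     _million_to_yi(2500) -> 25.0     # 2,500 million = 25 亿
    #     _safe_float("-")     -> None     # "-" is a null marker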

    def _determine_precision(self, trade_date: str) -> str:
        now = datetime.now(self.tz)
        if trade_date != now.date().isoformat():
            return "historical_exact"
        if now.hour > 16 or (now.hour == 16 and now.minute >= 10):
            return "close_final"
        return "realtime_exact"

    def _parse_timeline(
        self, trade_date: str, raw: list[str], precision: str
    ) -> tuple[list[dict], float | None, float | None]:
        timeline: list[dict] = []
        for item in raw:
            parts = item.split(",")
            if len(parts) < 10:
                continue
            total_net_buy = self._wan_to_yi(self._safe_float(parts[5]))
            timeline.append(
                {
                    "timestamp": f"{trade_date}T{parts[0]}:00+08:00",
                    "amount_hkd_billion": total_net_buy,
                    "precision": precision,
                }
            )
        # After the close, an all-empty/zero timeline means the feed has been
        # reset; treat it as unavailable rather than as a flat zero series.
        meaningful = [point for point in timeline if point["amount_hkd_billion"] not in (None, 0.0)]
        if precision == "close_final" and not meaningful:
            return [], None, None
        amounts = [point["amount_hkd_billion"] for point in timeline if point["amount_hkd_billion"] is not None]
        if len(amounts) < 2:
            return timeline, None, None
        one_min_change = round(amounts[-1] - amounts[-2], 4)
        five_min_change = round(amounts[-1] - amounts[-6], 4) if len(amounts) >= 6 else None
        return timeline, one_min_change, five_min_change
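
    # Judging by the indices used above, a raw "n2s" entry is a
    # comma-separated minute bar along the lines of (illustrative, not a real
    # payload) "10:30,..,..,..,..,123456,..,..,..,..", where parts[0] is the
    # HH:MM timestamp and parts[5] the cumulative southbound net buy in 万.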

    def _parse_benchmark_trends(self, payload: dict, *, detail_url: str | None) -> dict:
        data = payload.get("data") or {}
        points: list[dict] = []
        for item in data.get("trends") or []:
            parts = item.split(",")
            if len(parts) < 2:
                continue
            points.append(
                {
                    "timestamp": f"{parts[0]}:00+08:00",
                    "value": self._safe_float(parts[1]),
                }
            )
        return {
            "key": data.get("code", ""),
            "label": data.get("name", ""),
            "unit": "",
            "detail_url": detail_url,
            "points": points,
        }
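
    # Unlike the "n2s" timeline, parts[0] here already carries the date (only
    # seconds and the +08:00 offset are appended), so a trends entry
    # presumably looks like "2026-03-20 10:30,6123.45,..." (illustrative).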

    def _build_benchmark_series(self) -> tuple[list[dict], dict[str, dict]]:
        series: list[dict] = []
        raw_payloads: dict[str, dict] = {}
        for benchmark in SOUTHBOUND_BENCHMARKS:
            response = self.client.fetch_stock_trends(benchmark["secid"], ndays=1)
            raw_payloads[benchmark["key"]] = response
            parsed = self._parse_benchmark_trends(response, detail_url=benchmark["detail_url"])
            # Prefer our configured key/label/unit over whatever the API echoes back.
            parsed["key"] = benchmark["key"]
            parsed["label"] = benchmark["label"]
            parsed["unit"] = benchmark["unit"]
            series.append(parsed)
        return series, raw_payloads

    def _build_snapshot(
        self, realtime_payload: dict, timeline_payload: dict, threshold_step: float
    ) -> tuple[dict, dict[str, dict]]:
        south_sh = realtime_payload["data"]["sh2hk"]
        south_sz = realtime_payload["data"]["sz2hk"]
        trade_date = south_sh["date2"]
        precision = self._determine_precision(trade_date)
        updated_at = datetime.now(self.tz).isoformat(timespec="seconds")
        sh_net_buy = self._wan_to_yi(south_sh.get("netBuyAmt"))
        sz_net_buy = self._wan_to_yi(south_sz.get("netBuyAmt"))
        total_net_buy = round((sh_net_buy or 0) + (sz_net_buy or 0), 4)
        total_buy_amt = round(
            (self._wan_to_yi(south_sh.get("buyAmt")) or 0) + (self._wan_to_yi(south_sz.get("buyAmt")) or 0),
            4,
        )
        total_sell_amt = round(
            (self._wan_to_yi(south_sh.get("sellAmt")) or 0) + (self._wan_to_yi(south_sz.get("sellAmt")) or 0),
            4,
        )
        minute_timeline, one_min_change, five_min_change = self._parse_timeline(
            trade_date,
            timeline_payload.get("data", {}).get("n2s", []),
            precision,
        )
        benchmark_series, benchmark_raw_payloads = self._build_benchmark_series()
        threshold_progress = 0.0 if threshold_step <= 0 else round((total_net_buy % threshold_step) / threshold_step, 4)
        next_threshold = threshold_step if total_net_buy <= 0 else (int(total_net_buy // threshold_step) + 1) * threshold_step
        snapshot = {
            "trade_date": trade_date,
            "snapshot_time": updated_at,
            "market_state": get_market_state(),
            "total_net_inflow": total_net_buy,
            "cumulative_net_inflow": total_net_buy,
            "shanghai_net_inflow": sh_net_buy,
            "shenzhen_net_inflow": sz_net_buy,
            "buy_amount": total_buy_amt,
            "sell_amount": total_sell_amt,
            "net_buy_amount": total_net_buy,
            "one_min_change": one_min_change,
            "five_min_change": five_min_change,
            "precision": precision,
            "source_name": "东方财富",
            "source_url": "https://push2.eastmoney.com/api/qt/kamt/get",
            "updated_at": updated_at,
            "unavailable_reason": None,
            "threshold_progress": threshold_progress,
            "next_threshold_hkd_billion": next_threshold,
            "minute_timeline": minute_timeline,
            "benchmark_series": benchmark_series,
        }
        return snapshot, benchmark_raw_payloads
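
    # Threshold math above, worked through with illustrative numbers: with
    # threshold_step = 50 and total_net_buy = 123.4 亿, threshold_progress is
    # (123.4 % 50) / 50 = 0.468 and next_threshold is
    # (int(123.4 // 50) + 1) * 50 = 150, i.e. progress toward the next
    # 50-亿 alert level.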

    def _aggregate_history(self, rows: list[dict], start_date: str) -> dict:
        grouped: dict[str, dict] = defaultdict(lambda: {"trade_date": "", "net": 0.0})
        for row in rows:
            trade_date = row["TRADE_DATE"][:10]
            bucket = grouped[trade_date]
            bucket["trade_date"] = trade_date
            bucket["net"] += self._million_to_yi(row.get("NET_DEAL_AMT")) or 0.0
        ordered = [grouped[key] for key in sorted(grouped.keys()) if key >= start_date]
        daily = [{"period": item["trade_date"], "amount_hkd_billion": round(item["net"], 4)} for item in ordered]
        weekly_map: dict[str, float] = defaultdict(float)
        monthly_map: dict[str, float] = defaultdict(float)
        cumulative: list[dict] = []
        cumulative_total = 0.0
        recent_trade_days: list[dict] = []
        streak_in = 0
        streak_out = 0
        longest_in = 0
        longest_out = 0
        for item in ordered:
            date_obj = datetime.strptime(item["trade_date"], "%Y-%m-%d")
            week_key = f"{date_obj.strftime('%Y')}-W{date_obj.strftime('%W')}"
            month_key = date_obj.strftime("%Y-%m")
            weekly_map[week_key] += item["net"]
            monthly_map[month_key] += item["net"]
            cumulative_total += item["net"]
            cumulative.append({"period": item["trade_date"], "amount_hkd_billion": round(cumulative_total, 4)})
            # Track consecutive inflow/outflow runs; a flat (zero) day resets both.
            if item["net"] > 0:
                streak_in += 1
                streak_out = 0
            elif item["net"] < 0:
                streak_out += 1
                streak_in = 0
            else:
                streak_in = 0
                streak_out = 0
            longest_in = max(longest_in, streak_in)
            longest_out = max(longest_out, streak_out)
        # Most recent 20 trading days, newest first.
        for item in reversed(ordered[-20:]):
            recent_trade_days.append(
                {
                    "trade_date": item["trade_date"],
                    "total_net_inflow_hkd_billion": round(item["net"], 4),
                    "precision": "historical_exact",
                }
            )
        return {
            "start_date": start_date,
            "daily": daily,
            "weekly": [{"period": period, "amount_hkd_billion": round(value, 4)} for period, value in sorted(weekly_map.items())],
            "monthly": [{"period": period, "amount_hkd_billion": round(value, 4)} for period, value in sorted(monthly_map.items())],
            "cumulative": cumulative,
            "benchmark_history": self._build_history_benchmarks(start_date),
            "recent_trade_days": recent_trade_days,
            "summary": {
                "cumulative_net_inflow_hkd_billion": round(cumulative_total, 4),
                "trading_day_count": len(ordered),
                "max_single_day_inflow_hkd_billion": round(max((item["net"] for item in ordered), default=0), 4),
                "max_single_day_outflow_hkd_billion": round(min((item["net"] for item in ordered), default=0), 4),
                "longest_inflow_streak": longest_in,
                "longest_outflow_streak": longest_out,
            },
        }
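
    # Caveat on the "%W" week key used above: strftime numbers weeks from the
    # first Monday of the year, so days before it land in week "00" (e.g.
    # 2026-01-01, a Thursday, becomes "2026-W00").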

    def _build_history_benchmarks(self, start_date: str) -> dict[str, list[dict]]:
        benchmark_history: dict[str, list[dict]] = {}
        for benchmark in HISTORY_BENCHMARKS:
            response = self.client.fetch_stock_kline(benchmark["secid"], limit=160)
            data = response.get("data") or {}
            daily_rows = []
            for line in data.get("klines") or []:
                parts = line.split(",")
                # parts[2] is read below, so require at least 3 fields.
                if len(parts) < 3 or parts[0] < start_date:
                    continue
                daily_rows.append({"period": parts[0], "amount_hkd_billion": round(self._safe_float(parts[2]) or 0.0, 4)})
            weekly_map: dict[str, float] = {}
            monthly_map: dict[str, float] = {}
            for item in daily_rows:
                date_obj = datetime.strptime(item["period"], "%Y-%m-%d")
                # Plain dicts, so later days overwrite earlier ones: each
                # week/month bucket keeps its last available value.
                weekly_map[f"{date_obj.strftime('%Y')}-W{date_obj.strftime('%W')}"] = item["amount_hkd_billion"]
                monthly_map[date_obj.strftime("%Y-%m")] = item["amount_hkd_billion"]
            # Keys are hardcoded for the single configured benchmark (HSTECH).
            benchmark_history["hstech_daily"] = daily_rows
            benchmark_history["hstech_weekly"] = [{"period": period, "amount_hkd_billion": value} for period, value in sorted(weekly_map.items())]
            benchmark_history["hstech_monthly"] = [{"period": period, "amount_hkd_billion": value} for period, value in sorted(monthly_map.items())]
        return benchmark_history
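
    # Eastmoney klines are usually laid out as "date,open,close,high,low,...",
    # which would make parts[2] above the daily close, e.g. (illustrative)
    # "2026-03-20,6100.12,6123.45,6150.00,6088.30,...". This is an assumption
    # about the upstream format, not confirmed by this file.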

    def sync(self, start_date: str | None = None) -> dict:
        config = self.repository.get_system_config()
        history_start = start_date or config.get("history_backfill_start_date", HISTORY_START_DATE)
        threshold_step = float(config.get("threshold_step_hkd_billion", 50))
        diagnostics = self.repository.get_source_diagnostics()
        try:
            realtime_payload = self.client.fetch_realtime_overview()
            timeline_payload = self.client.fetch_intraday_timeline()
            history_rows = self.client.fetch_history(history_start)
            snapshot, benchmark_raw_payloads = self._build_snapshot(realtime_payload, timeline_payload, threshold_step)
            history = self._aggregate_history(history_rows, history_start)
            updated_at = datetime.now(self.tz).isoformat(timespec="seconds")
            self.repository.save_snapshot(snapshot["trade_date"], snapshot)
            self.repository.save_history(history)
            # Keep the raw upstream payloads for replay and debugging.
            self.repository.save_raw_payload(f"eastmoney_realtime_{snapshot['trade_date']}", realtime_payload)
            self.repository.save_raw_payload(f"eastmoney_timeline_{snapshot['trade_date']}", timeline_payload)
            self.repository.save_raw_payload(f"eastmoney_benchmarks_{snapshot['trade_date']}", benchmark_raw_payloads)
            self.repository.save_raw_payload(f"eastmoney_history_{snapshot['trade_date']}", {"rows": history_rows})
            config.update(
                {
                    "source_name": "东方财富",
                    "source_strategy": "Eastmoney public historical and realtime endpoints are connected; history comes from datacenter-web, realtime from push2.",
                }
            )
            self.repository.save_system_config(config)
            self.repository.save_source_diagnostics(
                {
                    "source_name": "东方财富",
                    "realtime_available": True,
                    "historical_available": True,
                    "last_success_at": updated_at,
                    "last_failure_at": None,
                    "last_error_reason": None,
                    "last_success_url": "https://datacenter-web.eastmoney.com/api/data/v1/get",
                    "last_persisted_at": updated_at,
                }
            )
            return {
                "snapshot": snapshot,
                "history_summary": history["summary"],
                "history_daily_count": len(history["daily"]),
                "alert_records": alert_engine.evaluate(snapshot),
            }
        except Exception as exc:
            # Record the failure but carry over the last known success markers.
            updated_at = datetime.now(self.tz).isoformat(timespec="seconds")
            self.repository.save_source_diagnostics(
                {
                    "source_name": "东方财富",
                    "realtime_available": False,
                    "historical_available": False,
                    "last_success_at": diagnostics.get("last_success_at"),
                    "last_failure_at": updated_at,
                    "last_error_reason": str(exc),
                    "last_success_url": diagnostics.get("last_success_url"),
                    "last_persisted_at": diagnostics.get("last_persisted_at"),
                }
            )
            raise


eastmoney_sync_service = EastmoneySyncService()
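
# Usage sketch (hypothetical; assumes a configured repository and network
# access to Eastmoney's endpoints):
#
#     result = eastmoney_sync_service.sync()             # default backfill start
#     result = eastmoney_sync_service.sync("2026-01-01") # explicit start date
#
# On success the returned dict carries the snapshot, the history summary, and
# any alert records; on failure the error is recorded in source diagnostics
# and the exception is re-raised.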