#!/usr/bin/env python3
"""
加载已训练追踪器,处理新交易日。
用法:
# 处理单个新日
python scripts/inference.py --date 20260401 --tracker outputs/tracker_state.pkl --output outputs/new_day/
# 批量处理
python scripts/inference.py --date 20260401,20260402,20260403 --tracker outputs/tracker_state.pkl
"""
from __future__ import annotations
import argparse
import os
import sys
from pathlib import Path
import pandas as pd
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
from src.data.loader import load_l2_day, BLACKLIST_DATES
from src.features.passive_orders import (
compute_vwap,
extract_passive_orders,
prepare_features,
select_candidates,
)
from src.clustering.daily_cluster import cluster_candidates
from src.matching.cross_day_match import match_multi_window
from src.tracking.entity_tracker import EntityTracker
def _latest_centroid_vector(entity: dict):
    """Return the most recently recorded centroid vector for an entity.

    Entries in ``entity["centroids"]`` may be stored either as
    ``(date, vector)`` tuples or as bare vectors; both formats are handled.
    Returns ``None`` when the entity has no centroid history.
    """
    history = entity.get("centroids")
    if not history:
        return None
    last = history[-1]
    # Older states stored bare vectors; newer ones store (date, vector).
    return last[1] if isinstance(last, tuple) else last


def _build_recent_clusters(tracker: EntityTracker, lookback: int = 2) -> dict:
    """Rebuild per-day cluster summaries for the last ``lookback`` processed days.

    Walks ``tracker.cluster_registry`` to recover, for each recent day, the
    cluster info dict that ``match_multi_window`` expects. Clusters whose
    entity no longer exists or has no centroid history are skipped.
    """
    recent: dict = {}
    for prev_date in sorted(tracker._processed_dates)[-lookback:]:
        # Reverse-lookup the entities that owned this day's clusters.
        day_entities = {
            cid: tracker.entities[eid]
            for (d, cid), eid in tracker.cluster_registry.items()
            if d == prev_date and eid in tracker.entities
        }
        if not day_entities:
            continue
        day_info: dict = {}
        for cid, entity in day_entities.items():
            vec = _latest_centroid_vector(entity)
            if vec is None:
                continue
            day_info[int(cid)] = {
                # The same stored vector serves both keys: only the latest
                # recorded centroid is available here, so scaled and raw views
                # are identical at this point.
                "centroid_scaled": vec,
                "centroid": vec,
                "total_amount": entity.get("total_amount_latest", 0),
                "size": entity.get("cluster_count", 1),
                "dominant_side": entity.get("dominant_sides", ["unknown"])[-1],
                "bid_ratio": entity.get("bid_ratio", 0.5),
            }
        recent[prev_date] = day_info
    return recent


def process_new_day(
    tracker: EntityTracker,
    date: int,
    max_cost: float = 3.5,
) -> dict:
    """Process one new trading day, update the tracker, and return its signal.

    Args:
        tracker: Loaded ``EntityTracker`` state; mutated in place via
            ``process_day``.
        date: Trading day as an int in YYYYMMDD form.
        max_cost: Maximum matching cost passed to ``match_multi_window``.

    Returns:
        The tracker's position-signal dict for the day (with ``"date"``
        added), or a ``{"date", "score", "error"}`` stub on failure.
    """
    if date in BLACKLIST_DATES:
        print(f"[{date}] blacklisted, skip")
        return {"date": date, "score": 0.0, "error": "blacklisted"}
    # Load raw L2 data; a failed load is reported but never fatal.
    try:
        data = load_l2_day(date)
    except Exception as e:
        print(f"[{date}] load failed: {e}")
        return {"date": date, "score": 0.0, "error": str(e)}
    trades = data["trades"]
    if "is_cancellation" in trades.columns:
        trades = trades[~trades["is_cancellation"]]
    trades = trades[trades["bs_flag_desc"].isin(["active_buy", "active_sell"])]
    if trades.empty:
        return {"date": date, "score": 0.0, "error": "empty trades"}
    # Passive-order extraction + daily clustering.
    vwap = compute_vwap(trades)
    passive = extract_passive_orders(trades, vwap)
    candidates = select_candidates(passive, top_n=150)
    if candidates.empty or len(candidates) < 5:
        return {"date": date, "score": 0.0, "error": "too few candidates"}
    feats = prepare_features(candidates)
    _, centroids = cluster_candidates(candidates, feats)
    if not centroids:
        return {"date": date, "score": 0.0, "n_clusters": 0}
    # Cross-day matching against the most recent already-processed days.
    recent = _build_recent_clusters(tracker)
    matches = match_multi_window(date, centroids, recent, max_cost=max_cost)
    # Update the tracker and derive the day's position signal.
    tracker.process_day(date, centroids, matches)
    signal = tracker.compute_position_signal(date)
    signal["date"] = date
    print(
        f"[{date}] clusters={len(centroids)}, "
        f"matches={len(matches)}, "
        f"entities={len(tracker.entities)}, "
        f"score={signal['score']:.4f}"
    )
    return signal
def main() -> None:
    """CLI entry point: load tracker state, run inference for each date, save signals.

    Parses ``--date`` (comma-separated YYYYMMDD values), processes each day in
    ascending order against the loaded tracker, writes the merged signal
    history to parquet, and optionally persists the updated tracker state.
    """
    parser = argparse.ArgumentParser(description="新日推理")
    parser.add_argument("--date", required=True, help="日期 YYYYMMDD,多个用逗号分隔")
    parser.add_argument("--tracker", default="./outputs/tracker_state.pkl", help="追踪器状态文件")
    parser.add_argument("--output", default="./outputs/new_day", help="输出目录")
    parser.add_argument("--save-state", action="store_true", help="推理后保存更新状态")
    args = parser.parse_args()

    # Load the persisted tracker state.
    print(f"Loading tracker from {args.tracker}")
    tracker = EntityTracker.load_state(args.tracker)
    print(f"  Loaded: {len(tracker.entities)} entities, {len(tracker._processed_dates)} processed days")
    print(f"  Last processed: {max(tracker._processed_dates) if tracker._processed_dates else 'N/A'}")

    # Tolerate stray commas/whitespace in --date, and dedupe: processing the
    # same day twice would double-update the tracker state.
    tokens = (t.strip() for t in args.date.split(","))
    dates = sorted({int(t) for t in tokens if t})
    if not dates:
        parser.error("--date contained no valid dates")

    os.makedirs(args.output, exist_ok=True)
    signals = []
    for date in dates:
        sig = process_new_day(tracker, date)
        signals.append(sig)
    signals_df = pd.DataFrame(signals)

    # Merge with the historical signal series before writing.
    hist_signals = tracker.get_daily_signals()
    all_signals = pd.concat([hist_signals, signals_df], ignore_index=True)
    sig_path = os.path.join(args.output, "position_signal_daily.parquet")
    all_signals.to_parquet(sig_path)
    print(f"\nSignals saved to {sig_path}")

    if args.save_state:
        state_path = os.path.join(args.output, "tracker_state_updated.pkl")
        tracker.save_state(state_path)

    # Summarize the newly computed signals.
    print("\n===== 最新信号 =====")
    for sig in signals:
        print(
            f"  {sig['date']}: score={sig.get('score', 'N/A')}, "
            f"bid={sig.get('bid_entities', 'N/A')}, "
            f"ask={sig.get('ask_entities', 'N/A')}"
        )


if __name__ == "__main__":
    main()