| |
| """ |
| 加载已训练追踪器,处理新交易日。 |
| |
| 用法: |
| # 处理单个新日 |
| python scripts/inference.py --date 20260401 --tracker outputs/tracker_state.pkl --output outputs/new_day/ |
| |
| # 批量处理 |
| python scripts/inference.py --date 20260401,20260402,20260403 --tracker outputs/tracker_state.pkl |
| """ |
|
|
| from __future__ import annotations |
|
|
| import argparse |
| import os |
| import sys |
| from pathlib import Path |
|
|
| import pandas as pd |
|
|
| sys.path.insert(0, str(Path(__file__).resolve().parent.parent)) |
|
|
| from src.data.loader import load_l2_day, BLACKLIST_DATES |
| from src.features.passive_orders import ( |
| compute_vwap, |
| extract_passive_orders, |
| prepare_features, |
| select_candidates, |
| ) |
| from src.clustering.daily_cluster import cluster_candidates |
| from src.matching.cross_day_match import match_multi_window |
| from src.tracking.entity_tracker import EntityTracker |
|
|
|
|
def process_new_day(
    tracker: EntityTracker,
    date: int,
    max_cost: float = 3.5,
) -> dict:
    """Process one new trading day, update the tracker, and return its signal.

    Args:
        tracker: Trained EntityTracker carrying state from prior days.
        date: Trading day as a YYYYMMDD integer.
        max_cost: Maximum matching cost forwarded to match_multi_window.

    Returns:
        A signal dict that always contains "date" and "score"; on a skipped
        or failed day it also carries an "error" key explaining why.
    """
    if date in BLACKLIST_DATES:
        print(f"[{date}] blacklisted, skip")
        return {"date": date, "score": 0.0, "error": "blacklisted"}

    try:
        data = load_l2_day(date)
    except Exception as e:
        # Best-effort: one unreadable day must not abort a batch run.
        print(f"[{date}] load failed: {e}")
        return {"date": date, "score": 0.0, "error": str(e)}

    # Keep only genuine active trades: drop cancellations (when the column
    # exists) and anything not flagged as an aggressive buy/sell.
    trades = data["trades"]
    if "is_cancellation" in trades.columns:
        trades = trades[~trades["is_cancellation"]]
    trades = trades[trades["bs_flag_desc"].isin(["active_buy", "active_sell"])]
    if trades.empty:
        return {"date": date, "score": 0.0, "error": "empty trades"}

    vwap = compute_vwap(trades)
    passive = extract_passive_orders(trades, vwap)
    candidates = select_candidates(passive, top_n=150)
    # len(candidates) < 5 already covers the empty frame, so no separate
    # .empty check is needed.
    if len(candidates) < 5:
        return {"date": date, "score": 0.0, "error": "too few candidates"}

    feats = prepare_features(candidates)
    _, centroids = cluster_candidates(candidates, feats)

    if not centroids:
        return {"date": date, "score": 0.0, "n_clusters": 0}

    # Reconstruct the recent per-day cluster summaries the matcher expects.
    recent = _build_recent_windows(tracker)

    matches = match_multi_window(date, centroids, recent, max_cost=max_cost)

    tracker.process_day(date, centroids, matches)
    signal = tracker.compute_position_signal(date)
    signal["date"] = date

    print(
        f"[{date}] clusters={len(centroids)}, "
        f"matches={len(matches)}, "
        f"entities={len(tracker.entities)}, "
        f"score={signal['score']:.4f}"
    )

    return signal


def _build_recent_windows(tracker: EntityTracker, n_days: int = 2) -> dict:
    """Rebuild {date: {cluster_id: summary}} for the last *n_days* processed days.

    Each summary mirrors what match_multi_window consumes: centroid vectors,
    latest amount, cluster size, dominant side and bid ratio.
    NOTE(review): reads tracker._processed_dates / cluster_registry /
    entities directly — presumably stable internals; confirm against
    EntityTracker before refactoring it.
    """
    recent: dict = {}
    for prev_date in sorted(tracker._processed_dates)[-n_days:]:
        day_clusters = {
            cid: tracker.entities[eid]
            for (d, cid), eid in tracker.cluster_registry.items()
            if d == prev_date and eid in tracker.entities
        }
        if not day_clusters:
            continue
        recent[prev_date] = {}
        for cid, e in day_clusters.items():
            if not e.get("centroids"):
                continue
            last = e["centroids"][-1]
            # A stored centroid may be a (raw, scaled) tuple or a bare
            # vector; both output keys historically carry the same value.
            centroid = last[1] if isinstance(last, tuple) else last
            # `or ["unknown"]` also guards an *empty* dominant_sides list,
            # which would otherwise raise IndexError on [-1].
            sides = e.get("dominant_sides") or ["unknown"]
            recent[prev_date][int(cid)] = {
                "centroid_scaled": centroid,
                "centroid": centroid,
                "total_amount": e.get("total_amount_latest", 0),
                "size": e.get("cluster_count", 1),
                "dominant_side": sides[-1],
                "bid_ratio": e.get("bid_ratio", 0.5),
            }
    return recent
|
|
|
|
def main():
    """CLI entry point: load a trained tracker, run new days, persist signals."""
    parser = argparse.ArgumentParser(description="新日推理")
    parser.add_argument("--date", required=True, help="日期 YYYYMMDD,多个用逗号分隔")
    parser.add_argument("--tracker", default="./outputs/tracker_state.pkl", help="追踪器状态文件")
    parser.add_argument("--output", default="./outputs/new_day", help="输出目录")
    parser.add_argument("--save-state", action="store_true", help="推理后保存更新状态")
    args = parser.parse_args()

    print(f"Loading tracker from {args.tracker}")
    tracker = EntityTracker.load_state(args.tracker)
    print(f" Loaded: {len(tracker.entities)} entities, {len(tracker._processed_dates)} processed days")
    print(f" Last processed: {max(tracker._processed_dates) if tracker._processed_dates else 'N/A'}")

    # Tolerate stray whitespace and trailing commas in --date (int("") would
    # crash) and deduplicate so each day is processed exactly once, in order.
    tokens = (t.strip() for t in args.date.split(","))
    dates = sorted({int(t) for t in tokens if t})

    os.makedirs(args.output, exist_ok=True)

    signals = []
    for date in dates:
        signals.append(process_new_day(tracker, date))

    signals_df = pd.DataFrame(signals)

    # Append the new rows to the tracker's historical signals.
    # NOTE(review): tracker.get_daily_signals() may already include the days
    # just processed via process_day — confirm this concat cannot duplicate
    # dates in the output file.
    hist_signals = tracker.get_daily_signals()
    all_signals = pd.concat([hist_signals, signals_df], ignore_index=True)

    sig_path = os.path.join(args.output, "position_signal_daily.parquet")
    all_signals.to_parquet(sig_path)
    print(f"\nSignals saved to {sig_path}")

    # Persist the updated tracker state only on explicit request, writing to
    # a new file so the input state is never clobbered.
    if args.save_state:
        state_path = os.path.join(args.output, "tracker_state_updated.pkl")
        tracker.save_state(state_path)

    print("\n===== 最新信号 =====")
    for sig in signals:
        print(
            f" {sig['date']}: score={sig.get('score', 'N/A')}, "
            f"bid={sig.get('bid_entities', 'N/A')}, "
            f"ask={sig.get('ask_entities', 'N/A')}"
        )
|
|
|
|
| if __name__ == "__main__": |
| main() |
|
|