| """ |
| 完整流水线:遍历日期范围,执行五阶段处理。 |
| |
| 本地测试: |
| python src/scripts/pipeline.py --start 20240311 --end 20240315 --output ./outputs |
| |
| Sandbox 全量: |
| python src/scripts/pipeline.py --start 20230101 --end 20260331 --output ./outputs --full |
| """ |
|
|
| from __future__ import annotations |
|
|
| import argparse |
| import json |
| import os |
| import sys |
| from collections import defaultdict |
| from pathlib import Path |
| from typing import Dict, List |
|
|
| import numpy as np |
| import pandas as pd |
| from tqdm import tqdm |
|
|
| |
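# Make the repository root importable when this script is executed directly;
# this assumes the file lives at src/scripts/pipeline.py, as in the usage
# examples in the module docstring.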
sys.path.insert(0, str(Path(__file__).resolve().parent.parent.parent))

from src.data.loader import load_l2_day, BLACKLIST_DATES
from src.features.passive_orders import (
    compute_vwap,
    extract_passive_orders,
    prepare_features,
    select_candidates,
)
from src.clustering.daily_cluster import cluster_candidates
from src.matching.cross_day_match import match_multi_window
from src.tracking.entity_tracker import EntityTracker


def _make_output_dirs(base: str) -> None:
    for d in ["passive_orders", "clusters", "matches", "signals", "reports"]:
        os.makedirs(os.path.join(base, d), exist_ok=True)


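# Files written under --output by this pipeline (see _make_output_dirs above
# and the writers in process_single_day / run_pipeline):
#   passive_orders/<date>.parquet          (only with --save-intermediate)
#   clusters/<date>.parquet                (only with --save-intermediate)
#   matches/match_pairs.parquet
#   signals/position_signal_daily.parquet
#   reports/summary.json
#   entity_timeline.parquet
#   cluster_registry.parquet
#   tracker_state.pkl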
def process_single_day(
    date: int,
    output_base: str,
    save_intermediate: bool = False,
) -> tuple[pd.DataFrame | None, dict]:
    """
    Process a single trading day.

    Returns:
        (labeled_candidates_df, centroids_dict), or (None, {}) when the day
        yields no usable data.
    """
    try:
        data = load_l2_day(date)
    except Exception as e:
        print(f" [{date}] load failed: {e}")
        return None, {}

    trades = data["trades"]

    # Drop cancellation records; only executed trades feed the features.
    if "is_cancellation" in trades.columns:
        trades = trades[~trades["is_cancellation"]]

    if trades.empty:
        return None, {}

    vwap = compute_vwap(trades)

    # Extract passive orders relative to VWAP and keep the top candidates.
    passive = extract_passive_orders(trades, vwap)
    if passive.empty:
        return None, {}

    candidates = select_candidates(passive, top_n=150)
    if candidates.empty:
        return None, {}

    if save_intermediate:
        candidates.to_parquet(
            os.path.join(output_base, "passive_orders", f"{date}.parquet"),
            index=False,
        )

    # Cluster the candidates within the day.
    feats = prepare_features(candidates)
    if feats.shape[0] < 5:
        # Too few candidates to cluster; mark them all as noise.
        candidates["cluster_id"] = -1
        return candidates, {}

    labeled, centroids = cluster_candidates(candidates, feats)

    if save_intermediate:
        labeled.to_parquet(
            os.path.join(output_base, "clusters", f"{date}.parquet"),
            index=False,
        )

    return labeled, centroids


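# run_pipeline drives the per-day loop: cluster each day's passive-order
# candidates (process_single_day), match today's cluster centroids against the
# two most recent prior days (match_multi_window), fold the matches into the
# EntityTracker, and record a daily position signal.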
def run_pipeline(
    dates: List[int],
    output_base: str,
    save_intermediate: bool = False,
):
    """Run the full pipeline over the given trading days."""
    _make_output_dirs(output_base)

    tracker = EntityTracker(inactive_threshold=5)

    # Rolling window of per-day cluster centroids used for cross-day matching.
    recent_history: Dict[int, Dict[int, dict]] = {}
    # Labeled candidates per day (kept for inspection).
    daily_labeled: Dict[int, pd.DataFrame] = {}

    all_signals = []
    all_matches = []

    print(f"Processing {len(dates)} trading days ({dates[0]} ~ {dates[-1]})")

    for date in tqdm(dates, desc="Processing days"):
        if date in BLACKLIST_DATES:
            continue

        labeled, centroids = process_single_day(
            date, output_base, save_intermediate
        )

        if labeled is None or not centroids:
            # Nothing usable today: record an empty slot, trim history, move on.
            recent_history[date] = {}
            daily_labeled[date] = labeled if labeled is not None else pd.DataFrame()
            for old_date in sorted(recent_history.keys())[:-4]:
                del recent_history[old_date]
            continue

        daily_labeled[date] = labeled
        recent_history[date] = centroids

        # Match today's centroids against the two most recent prior days.
        prev_dates = [d for d in sorted(recent_history.keys()) if d < date]
        prev_dates = prev_dates[-2:]

        prev_centroids_for_match = {
            d: recent_history[d] for d in prev_dates if recent_history.get(d)
        }

        matches = match_multi_window(
            date, centroids, prev_centroids_for_match, max_cost=3.5
        )

        for prev_d, prev_cid, curr_cid, cost in matches:
            all_matches.append({
                "date": date,
                "prev_date": prev_d,
                "prev_cid": prev_cid,
                "curr_cid": curr_cid,
                "cost": cost,
            })

        # Fold the matches into the entity tracker.
        tracker.process_day(date, centroids, matches)

        # Daily position signal (the tracker also keeps these internally).
        signal = tracker.compute_position_signal(date)
        signal["date"] = date
        all_signals.append(signal)

        # Trim the rolling history. Subtracting YYYYMMDD integers is not a
        # calendar-day difference (it over-counts across month boundaries),
        # so keep the most recent few days by recency instead.
        for old_date in sorted(recent_history.keys())[:-4]:
            del recent_history[old_date]

    # Persist entity-level outputs.
    entity_df = tracker.get_entity_timeline()
    entity_df.to_parquet(os.path.join(output_base, "entity_timeline.parquet"))

    cluster_reg_df = tracker.get_cluster_registry_table()
    cluster_reg_df.to_parquet(os.path.join(output_base, "cluster_registry.parquet"))

    signals_df = tracker.get_daily_signals()
    signals_df.to_parquet(
        os.path.join(output_base, "signals", "position_signal_daily.parquet")
    )

    if all_matches:
        pd.DataFrame(all_matches).to_parquet(
            os.path.join(output_base, "matches", "match_pairs.parquet")
        )

    tracker.save_state(os.path.join(output_base, "tracker_state.pkl"))

    # Summary report.
    status_counts = entity_df["status"].value_counts()
    report = {
        "total_dates_processed": len(dates),
        "total_entities_discovered": len(tracker.entities),
        "active_entities": int(status_counts.get("active", 0)),
        "inactive_entities": int(status_counts.get("inactive", 0)),
        "avg_entities_per_day": float(
            np.mean([len(v) for v in tracker.daily_active.values()])
        ) if tracker.daily_active else 0.0,
        "bid_dominant_entities": int((entity_df["dominant_side"] == "bid").sum()),
        "ask_dominant_entities": int((entity_df["dominant_side"] == "ask").sum()),
        "total_matches": len(all_matches),
        "signal_summary": {
            "mean_score": float(signals_df["score"].mean()) if len(signals_df) > 0 else 0.0,
            "std_score": float(signals_df["score"].std()) if len(signals_df) > 0 else 0.0,
        },
    }

    with open(os.path.join(output_base, "reports", "summary.json"), "w", encoding="utf-8") as f:
        json.dump(report, f, indent=2, ensure_ascii=False)

    print("\n===== Summary =====")
    print(f"Trading days: {len(dates)}")
    print(f"Entities discovered: {report['total_entities_discovered']}")
    print(f"Active entities: {report['active_entities']}, inactive: {report['inactive_entities']}")
    print(f"Bid-dominant entities: {report['bid_dominant_entities']}, ask-dominant entities: {report['ask_dominant_entities']}")
    print(f"Cross-day match pairs: {report['total_matches']}")
    print(f"Output directory: {output_base}")


def main():
    parser = argparse.ArgumentParser(description="Cross-day tracking of dominant-player behavior fingerprints")
    parser.add_argument("--start", type=int, required=True, help="start date, YYYYMMDD")
    parser.add_argument("--end", type=int, required=True, help="end date, YYYYMMDD")
    parser.add_argument("--output", type=str, default="./outputs", help="output directory")
    parser.add_argument("--full", action="store_true", help="full-run mode (non-interactive)")
    parser.add_argument("--save-intermediate", action="store_true", help="save intermediate outputs")
    parser.add_argument("--dates", type=str, nargs="*", help="explicit list of dates (overrides --start/--end)")
    args = parser.parse_args()

    if args.dates:
        dates = sorted(int(d) for d in args.dates)
    else:
        from datetime import datetime, timedelta

        # Expand [start, end] into candidate trading days: weekdays only,
        # blacklisted dates removed.
        start_dt = datetime.strptime(str(args.start), "%Y%m%d")
        end_dt = datetime.strptime(str(args.end), "%Y%m%d")
        dates = []
        curr = start_dt
        while curr <= end_dt:
            d = int(curr.strftime("%Y%m%d"))
            if d not in BLACKLIST_DATES and curr.weekday() < 5:
                dates.append(d)
            curr += timedelta(days=1)

    run_pipeline(dates, args.output, save_intermediate=args.save_intermediate)


if __name__ == "__main__":
    main()
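
# Hypothetical downstream usage (not part of the pipeline itself): a minimal
# sketch of how the daily signal written above could be inspected.
#
#     import pandas as pd
#     sig = pd.read_parquet("./outputs/signals/position_signal_daily.parquet")
#     print(sig.tail())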