"""
本地最小验证:5 个交易日,验证 pipeline 正确性。

用法:
    python scripts/run_local_test.py
"""
|
|
import os
import sys
from pathlib import Path

# Make the repository root importable when this file is run as a script
# (scripts/ is one level below the project root).
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))

from src.data.loader import load_l2_day, BLACKLIST_DATES
from src.features.passive_orders import (
    compute_vwap,
    extract_passive_orders,
    prepare_features,
    select_candidates,
)
from src.clustering.daily_cluster import cluster_candidates
from src.matching.cross_day_match import match_clusters, match_multi_window
from src.tracking.entity_tracker import EntityTracker
|
|
| |
# Trading days used for the local smoke test (YYYYMMDD integers,
# five consecutive sessions).
TEST_DATES = [20240311, 20240312, 20240313, 20240314, 20240315]

# Output directory for test artifacts: <repo>/outputs/local_test
# (resolved relative to this script's location).
OUTPUT_DIR = os.path.join(
    os.path.dirname(__file__), "..", "outputs", "local_test"
)
|
|
|
|
def main():
    """Run the minimal 5-day local validation of the full pipeline.

    For each date in TEST_DATES: load L2 data, extract passive-order
    candidates, cluster them, match clusters against up to two previous
    days, and feed the matches into the cross-day EntityTracker.
    Afterwards export the entity timeline, the daily position signals,
    and (when available) sample passive-order / cluster tables to
    OUTPUT_DIR as parquet files.
    """
    os.makedirs(OUTPUT_DIR, exist_ok=True)
    print(f"本地测试: {len(TEST_DATES)} 天, 输出目录: {OUTPUT_DIR}\n")

    tracker = EntityTracker(inactive_threshold=5)
    # date -> cluster centroids of that day ({} when the day had no candidates)
    recent_history = {}

    # Samples from the last successfully processed day, exported at the end.
    # Initialized to None so the export section cannot raise NameError when
    # every day is skipped (load failure) or produces no candidates.
    passive = None
    labeled = None

    for date in TEST_DATES:
        print(f"--- {date} ---")

        # Best-effort: a day whose data cannot be loaded is skipped, not fatal.
        try:
            data = load_l2_day(date)
        except Exception as e:
            print(f" SKIP: 加载失败 ({e})")
            continue

        trades = data["trades"]
        orders = data["orders"]
        # Drop cancellation rows if the feed includes them.
        if "is_cancellation" in trades.columns:
            trades = trades[~trades["is_cancellation"]]

        print(f" trades={len(trades):,}, orders={len(orders):,}")

        # Feature extraction: VWAP-anchored passive orders, top-N candidates.
        vwap = compute_vwap(trades)
        passive = extract_passive_orders(trades, vwap)
        candidates = select_candidates(passive, top_n=150)
        print(f" passive_orders={len(passive):,}, candidates={len(candidates)}")
        print(f" VWAP={vwap:.2f}, bid_candidates={len(candidates[candidates['side']=='bid'])}, ask_candidates={len(candidates[candidates['side']=='ask'])}")

        if candidates.empty:
            # Record an empty day so later matching windows stay aligned.
            recent_history[date] = {}
            continue

        # Daily clustering; cluster_id == -1 marks noise points.
        feats = prepare_features(candidates)
        labeled, centroids = cluster_candidates(candidates, feats)
        n_clusters = len(centroids)
        n_noise = (labeled["cluster_id"] == -1).sum()
        print(f" clusters={n_clusters}, noise={n_noise}")

        # Cross-day matching window: at most the two most recent prior days
        # that actually produced centroids.
        prev_dates = sorted(d for d in recent_history if d < date)[-2:]
        prev_c_for_match = {
            d: recent_history[d] for d in prev_dates if recent_history.get(d)
        }

        matches = match_multi_window(date, centroids, prev_c_for_match)
        print(f" matches={len(matches)}")
        for m in matches:
            print(f" {m[0]} c{m[1]} → {m[2]} cost={m[3]:.3f}")

        # Update entity identities and compute the day's position signal.
        cid_to_eid = tracker.process_day(date, centroids, matches)
        print(f" entity mapping: {cid_to_eid}")

        signal = tracker.compute_position_signal(date)
        print(f" signal: score={signal['score']:.4f}, bid_entities={signal['bid_entities']}, ask_entities={signal['ask_entities']}")

        recent_history[date] = centroids

    # ---- Export section ----
    print("\n===== 导出 =====")

    entity_df = tracker.get_entity_timeline()
    print(f"实体总数: {len(entity_df)}")
    print(entity_df.to_string())

    entity_path = os.path.join(OUTPUT_DIR, "entity_timeline.parquet")
    entity_df.to_parquet(entity_path)
    print(f"实体表 → {entity_path}")

    signals_df = tracker.get_daily_signals()
    signals_path = os.path.join(OUTPUT_DIR, "position_signal_daily.parquet")
    signals_df.to_parquet(signals_path)
    print(f"信号表 → {signals_path}")
    print(signals_df[["date", "score", "score_z", "n_active_entities"]].to_string())

    # Sample exports only exist when at least one day reached the
    # corresponding stage (guards fix a NameError in the original flow).
    if passive is not None:
        passive_path = os.path.join(OUTPUT_DIR, "sample_passive_orders.parquet")
        passive.to_parquet(passive_path)
        print(f"被动单样本 → {passive_path}")

    if labeled is not None:
        cluster_path = os.path.join(OUTPUT_DIR, "sample_clusters.parquet")
        labeled.to_parquet(cluster_path)
        print(f"聚类样本 → {cluster_path}")

    print("\n===== 本地验证完成 =====")
|
|
|
|
# Script entry point: only run when executed directly, not on import.
if __name__ == "__main__":
    main()
|
|