#!/usr/bin/env python3
"""
本地最小验证:5 个交易日,验证 pipeline 正确性。
用法:
python scripts/run_local_test.py
"""
import os
import sys
from pathlib import Path
# 保证项目根在 path 里
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
from src.data.loader import load_l2_day, BLACKLIST_DATES
from src.features.passive_orders import (
compute_vwap,
extract_passive_orders,
prepare_features,
select_candidates,
)
from src.clustering.daily_cluster import cluster_candidates
from src.matching.cross_day_match import match_clusters, match_multi_window
from src.tracking.entity_tracker import EntityTracker
# Test dates: second week of March 2024 (chosen to avoid blacklisted dates).
TEST_DATES = [20240311, 20240312, 20240313, 20240314, 20240315]
# Output directory for all artifacts of this run: <repo>/outputs/local_test,
# resolved relative to this script's location.
OUTPUT_DIR = os.path.join(
    os.path.dirname(__file__), "..", "outputs", "local_test"
)
def main():
    """Run the 5-day local validation pipeline and export the results.

    For each date in TEST_DATES: load L2 data, extract passive orders
    around VWAP, cluster candidates, match clusters against up to the two
    most recent prior days, update the entity tracker, and compute the
    daily position signal. Afterwards export the entity timeline, the
    daily signal table, and sample parquet files for manual inspection.
    """
    os.makedirs(OUTPUT_DIR, exist_ok=True)
    print(f"本地测试: {len(TEST_DATES)} 天, 输出目录: {OUTPUT_DIR}\n")

    tracker = EntityTracker(inactive_threshold=5)
    recent_history = {}  # date -> centroids dict, consumed by cross-day matching
    # Samples captured from the FIRST successfully processed day.
    # Bug fix: the previous version exported whatever `passive`/`labeled`
    # happened to hold after the loop (i.e. the *last* day) while labelling
    # it the first-day sample, and crashed with NameError if no day loaded.
    first_passive = None
    first_labeled = None

    for date in TEST_DATES:
        print(f"--- {date} ---")
        # 1. Load; a failed day is skipped rather than aborting the run.
        try:
            data = load_l2_day(date)
        except Exception as e:
            print(f" SKIP: 加载失败 ({e})")
            continue
        trades = data["trades"]
        orders = data["orders"]
        # Drop cancellation rows when the loader provides that flag.
        if "is_cancellation" in trades.columns:
            trades = trades[~trades["is_cancellation"]]
        print(f" trades={len(trades):,}, orders={len(orders):,}")

        # 2. VWAP + passive-order extraction and candidate selection.
        vwap = compute_vwap(trades)
        passive = extract_passive_orders(trades, vwap)
        candidates = select_candidates(passive, top_n=150)
        if first_passive is None:
            first_passive = passive
        print(f" passive_orders={len(passive):,}, candidates={len(candidates)}")
        print(f" VWAP={vwap:.2f}, bid_candidates={len(candidates[candidates['side']=='bid'])}, ask_candidates={len(candidates[candidates['side']=='ask'])}")
        if candidates.empty:
            # Record an empty centroid set so later days still see this date.
            recent_history[date] = {}
            continue

        # 3. Daily clustering.
        feats = prepare_features(candidates)
        labeled, centroids = cluster_candidates(candidates, feats)
        if first_labeled is None:
            first_labeled = labeled
        n_clusters = len(centroids)
        n_noise = (labeled["cluster_id"] == -1).sum()
        print(f" clusters={n_clusters}, noise={n_noise}")

        # 4. Cross-day matching against up to the 2 most recent prior days
        #    that actually produced centroids.
        prev_dates = sorted(d for d in recent_history if d < date)[-2:]
        prev_c_for_match = {
            d: recent_history[d] for d in prev_dates if recent_history.get(d)
        }
        matches = match_multi_window(date, centroids, prev_c_for_match)
        print(f" matches={len(matches)}")
        for m in matches:
            print(f" {m[0]} c{m[1]} → {m[2]} cost={m[3]:.3f}")

        # 5. Entity tracking: map today's cluster ids onto persistent entities.
        cid_to_eid = tracker.process_day(date, centroids, matches)
        print(f" entity mapping: {cid_to_eid}")

        # 6. Position-signal inference for the day.
        signal = tracker.compute_position_signal(date)
        print(f" signal: score={signal['score']:.4f}, bid_entities={signal['bid_entities']}, ask_entities={signal['ask_entities']}")
        recent_history[date] = centroids

    # ---- Export ----
    print("\n===== 导出 =====")
    entity_df = tracker.get_entity_timeline()
    print(f"实体总数: {len(entity_df)}")
    print(entity_df.to_string())
    entity_path = os.path.join(OUTPUT_DIR, "entity_timeline.parquet")
    entity_df.to_parquet(entity_path)
    print(f"实体表 → {entity_path}")

    signals_df = tracker.get_daily_signals()
    signals_path = os.path.join(OUTPUT_DIR, "position_signal_daily.parquet")
    signals_df.to_parquet(signals_path)
    print(f"信号表 → {signals_path}")
    # Guard: column selection on an empty frame would raise KeyError.
    if not signals_df.empty:
        print(signals_df[["date", "score", "score_z", "n_active_entities"]].to_string())

    # Passive-order sample (first successfully processed day).
    if first_passive is not None:
        passive_path = os.path.join(OUTPUT_DIR, "sample_passive_orders.parquet")
        first_passive.to_parquet(passive_path)
        print(f"被动单样本 → {passive_path}")
    # Cluster sample (first day that produced clusters).
    if first_labeled is not None:
        cluster_path = os.path.join(OUTPUT_DIR, "sample_clusters.parquet")
        first_labeled.to_parquet(cluster_path)
        print(f"聚类样本 → {cluster_path}")
    print("\n===== 本地验证完成 =====")


if __name__ == "__main__":
    main()