"""
Alpha Factory β Gradio UI
View generated alphas, copy expressions, run new batches.
Run: uv run python -m alpha_factory.ui
"""
import os
import sys
import subprocess
import duckdb
import gradio as gr
from pathlib import Path
# Load variables from a local .env file when python-dotenv is installed;
# silently skip otherwise (the shell may already export what we need).
try:
    from dotenv import load_dotenv
    load_dotenv()
except ImportError:
    pass

# DuckDB file written by the pipeline; resolved relative to the CWD,
# so the UI must be launched from the project root.
DB_PATH = Path("factor_store/alphas.duckdb")
def get_alphas_from_db(limit=50):
    """Fetch the most recent alpha rows from the DuckDB store.

    Args:
        limit: Maximum number of rows to return, newest first.

    Returns:
        List of tuples ``(alpha_id, submitted_at, expression, theme,
        archetype, anomaly_tag, neutralization, decay, fields_used,
        verdict)``, or an empty list when the DB file is missing or the
        query fails.
    """
    if not DB_PATH.exists():
        return []
    conn = duckdb.connect(str(DB_PATH), read_only=True)
    try:
        # Parameterized LIMIT instead of f-string interpolation: keeps the
        # statement injection-safe if `limit` ever comes from user input.
        rows = conn.execute(
            """
            SELECT
                alpha_id,
                submitted_at,
                expression,
                theme,
                archetype,
                anomaly_tag,
                neutralization,
                decay,
                fields_used,
                verdict
            FROM alphas
            ORDER BY submitted_at DESC
            LIMIT ?
            """,
            [int(limit)],
        ).fetchall()
        return rows
    except Exception:
        # Best-effort read: the UI shows an empty table rather than
        # crashing when the schema is missing or the file is locked.
        return []
    finally:
        conn.close()
def get_alpha_cards():
    """Build display rows for the Gradio dataframe from stored alphas."""
    records = get_alphas_from_db()
    if not records:
        # Placeholder row keeps the dataframe shape when the store is empty.
        return [["β", "β", "β", "β", "β", "β", "β", "Run a batch first"]]
    cards = []
    status_by_verdict = {"promote": "PASS", "iterate": "PENDING", "kill": "FAIL"}
    for (alpha_id, submitted_at, expression, theme, archetype,
         tag, neutral, decay, fields, verdict) in records:
        when = submitted_at.strftime("%Y-%m-%d %H:%M") if submitted_at else "?"
        status = status_by_verdict.get(verdict or "", "NEW")
        # Truncate long expressions so the table column stays readable.
        if expression and len(expression) > 80:
            preview = expression[:80] + "..."
        else:
            preview = expression or ""
        cards.append([
            when,
            alpha_id[:10],
            theme or "",
            archetype or "",
            tag or "",
            str(decay or 0),
            status,
            preview,
        ])
    return cards
def get_full_expression(evt: gr.SelectData):
    """Return the full expression text for the table row the user clicked."""
    records = get_alphas_from_db()
    if not records or evt.index is None:
        return "Click a row above to see the full expression"
    # Gradio may deliver the selection as (row, col) or a bare row index.
    idx = evt.index
    if isinstance(idx, (list, tuple)):
        idx = idx[0]
    if idx >= len(records):
        return ""
    return records[idx][2] or ""
def run_batch(batch_size):
    """Run the alpha pipeline as a subprocess and return its log output.

    Forces UTF-8 on the child's stdio so Rich output does not crash on
    Windows (cp1252) consoles, and disables color for cleaner piped logs.

    Args:
        batch_size: Number of alphas to generate (coerced to int).

    Returns:
        Tail of the pipeline's stdout (plus stderr on failure), or an
        error string on timeout/launch failure. Never raises.
    """
    env = os.environ.copy()
    # Force UTF-8 output — prevents Rich/Windows cp1252 crash.
    env["PYTHONIOENCODING"] = "utf-8"
    # Any non-empty value enables legacy Windows stdio so PYTHONIOENCODING
    # is honored on the console too; the value itself is not an encoding.
    env["PYTHONLEGACYWINDOWSSTDIO"] = "utf-8"
    # Disable Rich color/formatting when piped (cleaner output).
    env["NO_COLOR"] = "1"
    env["TERM"] = "dumb"
    # NOTE: HF_TOKEN already passes through via os.environ.copy(); the old
    # re-injection branch read the same mapping and was dead code.
    try:
        result = subprocess.run(
            [sys.executable, "-m", "alpha_factory.run",
             "--dry-run", "--batch-size", str(int(batch_size))],
            capture_output=True,
            env=env,
            timeout=180,
            cwd=str(Path.cwd()),
        )
        # Decode manually with errors="replace" so a stray byte in the
        # child's output cannot raise UnicodeDecodeError here.
        stdout = result.stdout.decode("utf-8", errors="replace") if result.stdout else ""
        stderr = result.stderr.decode("utf-8", errors="replace") if result.stderr else ""
        log = stdout[-3000:] if stdout else ""
        if result.returncode != 0 and stderr:
            log += "\n\n--- ERRORS ---\n" + stderr[-2000:]
        if not log.strip():
            log = f"Process exited with code {result.returncode}"
        return log
    except subprocess.TimeoutExpired:
        return "ERROR: Pipeline timed out after 180 seconds. Try smaller batch size."
    except Exception as e:
        return f"ERROR: {str(e)}"
def generate_and_refresh(batch_size):
    """Run one pipeline batch, then return the refreshed table and the log."""
    pipeline_output = run_batch(batch_size)
    return get_alpha_cards(), pipeline_output
def build_ui():
    """Assemble the Gradio app: controls, alpha table, expression viewer, log.

    Component creation order inside the Blocks context defines the layout,
    so statement order here is significant.

    Returns:
        The constructed (not yet launched) gr.Blocks application.
    """
    with gr.Blocks(title="Alpha Factory") as app:
        gr.Markdown("""
# Alpha Factory β Generated Alphas
View, copy, and manage alphas generated by the pipeline.
""")
        with gr.Row():
            with gr.Column(scale=1):
                # Controls: batch size plus the two action buttons.
                batch_size_input = gr.Number(value=3, label="Batch Size", minimum=1, maximum=20)
                generate_btn = gr.Button("Generate New Batch", variant="primary")
                refresh_btn = gr.Button("Refresh Table")
                gr.Markdown("*Dry run mode β no BRAIN submissions*")
            with gr.Column(scale=3):
                # Count is evaluated once at build time, not refreshed live.
                stats_md = gr.Markdown(f"**Alphas in store:** {len(get_alphas_from_db())}")
        gr.Markdown("### Click any row to see full expression")
        alpha_table = gr.Dataframe(
            value=get_alpha_cards(),
            headers=["Time", "ID", "Theme", "Archetype", "Tag", "Decay", "Status", "Expression"],
            interactive=False,
            wrap=True,
        )
        gr.Markdown("### Full Expression β Ctrl+A then Ctrl+C to copy")
        # interactive=True so the user can select/copy the text.
        full_expr = gr.Textbox(
            label="Full Expression",
            lines=6,
            interactive=True,
        )
        gr.Markdown("### Pipeline Log")
        pipeline_log = gr.Textbox(label="Output", lines=15, interactive=False)
        # Events: row click shows the full expression; refresh re-reads the
        # DB; generate runs a batch and then refreshes table + log together.
        alpha_table.select(get_full_expression, outputs=[full_expr])
        refresh_btn.click(get_alpha_cards, outputs=[alpha_table])
        generate_btn.click(
            generate_and_refresh,
            inputs=[batch_size_input],
            outputs=[alpha_table, pipeline_log],
        )
    return app
if __name__ == "__main__":
    # Local-only launch; set share=True to expose a public Gradio link.
    build_ui().launch(share=False)