# Hugging Face Spaces page header (scrape artifact) — Space status: Running
| import gradio as gr | |
| import requests | |
| import os | |
| import json | |
# ββ API CONFIG ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
# Credentials come from the environment (Hugging Face Space secret).
API_KEY = os.getenv("XAI_API_KEY")
API_URL = "https://api.x.ai/v1/chat/completions"

# Fail fast at import time so a missing secret is obvious in the startup logs.
if not API_KEY:
    raise ValueError("β XAI_API_KEY not set in Hugging Face Secrets")
def call_llm(system_prompt: str, user_prompt: str, history: list | None = None) -> str:
    """Send one chat-completion request to the xAI API and return the reply text.

    Args:
        system_prompt: System message; falls back to a generic assistant prompt.
        user_prompt: The user message appended after any history.
        history: Optional prior turns ({"role", "content"} dicts) inserted
            between the system message and the new user message.

    Returns:
        The assistant's reply, or a human-readable "β ..." error string.
        Callers render the return value directly, so errors are never raised.
    """
    try:
        messages = [{"role": "system", "content": system_prompt or "You are a helpful AI assistant."}]
        if history:
            messages.extend(history)
        messages.append({"role": "user", "content": user_prompt})
        headers = {
            "Authorization": f"Bearer {API_KEY}",
            "Content-Type": "application/json"
        }
        payload = {
            "model": "grok-4-1-fast-non-reasoning",
            "messages": messages,
            "temperature": 0.7,
            "max_tokens": 1000
        }
        response = requests.post(API_URL, headers=headers, json=payload, timeout=30)
        try:
            data = response.json()
        except ValueError:
            # Fix: non-JSON bodies (e.g. gateway HTML error pages) previously
            # bubbled up as a cryptic "Expecting value" parse error.
            return f"β API Error: HTTP {response.status_code}: {response.text[:200]}"
        if "choices" not in data:
            return f"β API Error: {json.dumps(data)}"
        return data["choices"][0]["message"]["content"]
    except Exception as e:
        return f"β Request Failed: {str(e)}"
# ββ BLOCK STATE βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
# A block is a dict: {id, type, title, var_name, value/options/prompt/...}.
# The whole app state is serialized to a JSON string inside a hidden gr.State.
def empty_state():
    """Return a fresh builder state: no blocks, id counter at 1, empty maps."""
    return dict(blocks=[], next_id=1, variables={}, chat_histories={})
def add_block(state, block_type):
    """Append a new block of `block_type` with default fields.

    Consumes and returns the JSON-encoded state string; the id counter is
    advanced so every block gets a unique id and variable name.
    """
    s = json.loads(state)
    bid = s["next_id"]
    s["next_id"] = bid + 1
    new_block = {
        "id": bid,
        "type": block_type,
        # e.g. "text-input" -> "Text Input 3"
        "title": f"{block_type.replace('-', ' ').title()} {bid}",
        "var_name": f"block_{bid}",
        "value": "",
        "options": "",
        "prompt": "",
        "system_prompt": "",
        "slider_min": 0,
        "slider_max": 100,
        "slider_val": 50,
        "ref_block": "",
        "output": "",
    }
    s["blocks"].append(new_block)
    return json.dumps(s)
def delete_block(state, bid):
    """Remove block `bid` (and any chat history it owns) from the state."""
    s = json.loads(state)
    target = int(bid)
    s["blocks"] = [blk for blk in s["blocks"] if blk["id"] != target]
    # Chat histories are keyed by the stringified block id.
    s["chat_histories"].pop(str(target), None)
    return json.dumps(s)
def update_block_field(state, bid, field, value):
    """Set `field` = `value` on the block whose id matches `bid`."""
    s = json.loads(state)
    target = int(bid)
    for blk in s["blocks"]:
        if blk["id"] == target:
            blk[field] = value
    return json.dumps(s)
def resolve_vars(template, variables):
    """Replace every {{name}} placeholder in `template` with str(variables[name])."""
    result = template
    for name, val in variables.items():
        result = result.replace(f"{{{{{name}}}}}", str(val))
    return result
def collect_input_values(state):
    """Pull current input-block values into the variables dict.

    Sliders are stored as strings so they splice cleanly into prompts.
    """
    s = json.loads(state)
    variables = s["variables"]
    for blk in s["blocks"]:
        kind = blk["type"]
        if kind == "slider-input":
            variables[blk["var_name"]] = str(blk["slider_val"])
        elif kind in ("text-input", "select-input"):
            variables[blk["var_name"]] = blk["value"]
    return json.dumps(s)
def run_all_blocks(state):
    """Execute the whole app top-to-bottom and record a run log.

    Three passes over the blocks, in declaration order:
      1. input blocks publish their values into `variables`,
      2. AI blocks resolve their prompt templates and call the LLM,
      3. display blocks copy their referenced variable into `output`.
    The log lines land in state["run_log"] for the UI log panel.
    """
    s = json.loads(state)
    log = ["βΆ Run All started"]

    # Pass 1: collect inputs.
    for blk in s["blocks"]:
        kind = blk["type"]
        if kind in ("text-input", "select-input"):
            s["variables"][blk["var_name"]] = blk["value"]
            log.append(f"β {blk['title']}: \"{blk['value']}\"")
        elif kind == "slider-input":
            s["variables"][blk["var_name"]] = str(blk["slider_val"])
            log.append(f"β {blk['title']}: {blk['slider_val']}")

    # Pass 2: run AI blocks in order; each result becomes a variable too,
    # so later AI blocks can reference earlier outputs.
    for blk in s["blocks"]:
        if blk["type"] in ("ai-text", "ai-transform"):
            prompt = resolve_vars(blk["prompt"] or "Hello", s["variables"])
            log.append(f"β³ Running {blk['title']}β¦")
            result = call_llm(blk["system_prompt"], prompt)
            blk["output"] = result
            s["variables"][blk["var_name"]] = result
            log.append(f"β {blk['title']}: {len(result)} chars")

    # Pass 3: refresh display blocks.
    for blk in s["blocks"]:
        if blk["type"] == "display":
            ref = blk.get("ref_block", "")
            blk["output"] = s["variables"].get(ref, "") if ref else ""

    log.append("β Run All complete")
    s["run_log"] = log
    return json.dumps(s)
def run_single_block(state, bid):
    """Execute just block `bid`, updating variables/output as Run All would.

    Unknown ids are a no-op; the state is returned unchanged.
    """
    s = json.loads(state)
    bid = int(bid)
    blk = next((x for x in s["blocks"] if x["id"] == bid), None)
    if blk is None:
        return json.dumps(s)
    kind = blk["type"]
    if kind in ("text-input", "select-input"):
        s["variables"][blk["var_name"]] = blk["value"]
    elif kind == "slider-input":
        s["variables"][blk["var_name"]] = str(blk["slider_val"])
    elif kind in ("ai-text", "ai-transform"):
        prompt = resolve_vars(blk["prompt"] or "Hello", s["variables"])
        result = call_llm(blk["system_prompt"], prompt)
        blk["output"] = result
        s["variables"][blk["var_name"]] = result
    elif kind == "display":
        ref = blk.get("ref_block", "")
        blk["output"] = s["variables"].get(ref, "") if ref else ""
    return json.dumps(s)
def send_chat(state, bid, user_message):
    """Handle one chat turn for chat block `bid`.

    Resolves {{var}} placeholders in the message, appends the user turn,
    calls the LLM with the prior history, and stores the assistant reply
    as both the block output and its variable.

    Returns (new_state_json, "") — the empty string clears the input box.
    Blank messages and unknown block ids are no-ops.
    """
    s = json.loads(state)
    key = str(bid)
    blk = next((x for x in s["blocks"] if x["id"] == int(bid)), None)
    if blk is None or not user_message.strip():
        return json.dumps(s), ""
    msg = resolve_vars(user_message.strip(), s["variables"])
    history = s["chat_histories"].setdefault(key, [])
    history.append({"role": "user", "content": msg})
    result = call_llm(
        blk["system_prompt"] or "You are a helpful assistant.",
        msg,
        history=history[:-1]  # prior turns only; msg is passed separately
    )
    history.append({"role": "assistant", "content": result})
    blk["output"] = result
    s["variables"][blk["var_name"]] = result
    return json.dumps(s), ""
# ββ RENDER HELPERS ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
def render_variables_panel(state):
    """Markdown summary of all variables, values truncated to 120 chars."""
    s = json.loads(state)
    if not s["variables"]:
        return "No variables yet. Run your app to see values."
    entries = []
    for name, val in s["variables"].items():
        text = str(val)
        preview = text[:120] + ("β¦" if len(text) > 120 else "")
        entries.append(f"**`{{{{{name}}}}}`**\n{preview}")
    return "\n\n---\n\n".join(entries)
def render_log(state):
    """Return the last run log as plain text, or a placeholder when empty."""
    entries = json.loads(state).get("run_log", [])
    return "\n".join(entries) if entries else "Empty. Run to see log."
def get_block_output(state, bid):
    """Return the stored output text of block `bid`, or "" if not found."""
    s = json.loads(state)
    target = int(bid)
    for blk in s["blocks"]:
        if blk["id"] == target:
            return blk["output"]
    return ""
def get_chat_history(state, bid):
    """Convert stored role/content turns into (user, assistant) pairs.

    Assumes turns alternate user/assistant; a trailing unanswered user
    message is dropped. Shape matches gr.Chatbot's tuple format.
    """
    hist = json.loads(state)["chat_histories"].get(str(bid), [])
    # range stops at len-2, so hist[i+1] is always in bounds.
    return [
        (hist[i]["content"], hist[i + 1]["content"])
        for i in range(0, len(hist) - 1, 2)
    ]
def get_var_names(state):
    """Variable names of all blocks except displays (displays produce no vars)."""
    s = json.loads(state)
    return [blk["var_name"] for blk in s["blocks"] if blk["type"] != "display"]
# ββ UI βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
# Custom stylesheet for the builder UI: title/subtitle sizing, block "cards",
# accent colors for the run/add buttons, and monospace output/log boxes.
# NOTE(review): Gradio applies custom CSS via gr.Blocks(css=...) — confirm it
# is wired there and not passed to launch().
CSS = """
#app-title {font-size: 26px; font-weight: 800; letter-spacing: -0.5px; margin-bottom: 4px;}
#app-subtitle {color: #888; font-size: 13px; margin-bottom: 20px;}
.block-card {border: 1px solid #2a2a36; border-radius: 12px; padding: 16px; margin-bottom: 14px; background: #16161a;}
.block-card-header {font-weight: 700; font-size: 14px; margin-bottom: 10px; display: flex; align-items: center; gap: 8px;}
.run-btn {background: #34d399 !important; color: #0a2318 !important; font-weight: 700 !important;}
.add-btn {background: #7c6fff !important; color: white !important;}
.output-box textarea {font-family: 'Courier New', monospace !important; font-size: 12px !important;}
.var-panel {font-size: 12px; line-height: 1.8;}
"""
# Fix: `css` must be passed to gr.Blocks() — Blocks.launch() has no css
# parameter, so the stylesheet was previously never applied.
with gr.Blocks(title="Grokflow β AI App Builder", css=CSS) as demo:
    # All app state (blocks, variables, chat histories) is one JSON string
    # held in a hidden gr.State; event handlers round-trip it.
    state = gr.State(json.dumps(empty_state()))

    # ββ HEADER ββ
    with gr.Row():
        with gr.Column():
            gr.HTML('<div id="app-title">β‘ GrokFlow</div><div id="app-subtitle">Build AI apps with blocks β powered by xAI Grok</div>')
        with gr.Column(scale=0, min_width=120):
            run_all_btn = gr.Button("βΆ Run All", variant="primary", elem_classes=["run-btn"])
    gr.Markdown("---")

    # ββ ADD BLOCK ROW ββ
    with gr.Row():
        gr.Markdown("**Add a block:**")
        btn_add_text = gr.Button("π Text Input", size="sm")
        btn_add_select = gr.Button("β° Dropdown", size="sm")
        btn_add_slider = gr.Button("β Slider", size="sm")
        btn_add_ai = gr.Button("β¦ AI Generate", size="sm", elem_classes=["add-btn"])
        btn_add_chat = gr.Button("π¬ AI Chat", size="sm", elem_classes=["add-btn"])
        btn_add_transform = gr.Button("β³ AI Transform", size="sm", elem_classes=["add-btn"])
        btn_add_display = gr.Button("β¦ Display", size="sm")
    gr.Markdown("---")

    # ββ DYNAMIC BLOCKS AREA ββ
    # Gradio cannot create components after launch, so we pre-build MAX_BLOCKS
    # fixed slots. Each slot is hidden until a block occupies it; the state
    # drives visibility and values via render_ui().
    MAX_BLOCKS = 10
    block_rows = []
    for i in range(MAX_BLOCKS):
        with gr.Group(visible=False) as grp:
            with gr.Row():
                b_title = gr.Textbox(label="Block title", scale=3, interactive=True)
                b_var = gr.Textbox(label="Variable name", scale=2, interactive=True)
                b_type_lbl = gr.Textbox(label="Type", scale=1, interactive=False)
                b_run = gr.Button("βΆ", size="sm", scale=0, min_width=40)
                b_del = gr.Button("β", size="sm", scale=0, min_width=40, variant="stop")
            # Input-specific
            with gr.Row(visible=True) as row_value:
                b_value = gr.Textbox(label="Value / Placeholder", scale=1, interactive=True)
            # Slider-specific
            with gr.Row(visible=False) as row_slider:
                b_slider = gr.Slider(minimum=0, maximum=100, value=50, label="Value", interactive=True)
                b_slider_min = gr.Number(value=0, label="Min", precision=0, interactive=True, scale=0, min_width=80)
                b_slider_max = gr.Number(value=100, label="Max", precision=0, interactive=True, scale=0, min_width=80)
            # Select options
            with gr.Row(visible=False) as row_options:
                b_options = gr.Textbox(label="Options (one per line)", lines=4, interactive=True)
            # AI-specific
            with gr.Row(visible=False) as row_ai:
                with gr.Column():
                    b_system = gr.Textbox(label="System prompt (optional)", interactive=True)
                    b_prompt = gr.Textbox(label="Prompt β use {{var_name}} to reference blocks", lines=4, interactive=True)
            # Chat
            with gr.Row(visible=False) as row_chat:
                with gr.Column():
                    b_chat_system = gr.Textbox(label="System prompt", interactive=True)
                    b_chatbot = gr.Chatbot(label="Chat", height=200)
                    with gr.Row():
                        b_chat_input = gr.Textbox(label="Message", scale=4, interactive=True)
                        b_chat_send = gr.Button("Send", scale=1, size="sm")
            # Display ref
            with gr.Row(visible=False) as row_display:
                b_ref = gr.Dropdown(label="Show variable", choices=[], interactive=True)
            # Output
            with gr.Row(visible=False) as row_output:
                b_output = gr.Textbox(label="Output", lines=10, max_lines=10, interactive=False, elem_classes=["output-box"], autoscroll=True)
        block_rows.append({
            "group": grp,
            "title": b_title, "var": b_var, "type_lbl": b_type_lbl,
            "run": b_run, "del": b_del,
            "row_value": row_value, "value": b_value,
            "row_slider": row_slider, "slider": b_slider, "slider_min": b_slider_min, "slider_max": b_slider_max,
            "row_options": row_options, "options": b_options,
            "row_ai": row_ai, "system": b_system, "prompt": b_prompt,
            "row_chat": row_chat, "chat_system": b_chat_system, "chatbot": b_chatbot,
            "chat_input": b_chat_input, "chat_send": b_chat_send,
            "row_display": row_display, "ref": b_ref,
            "row_output": row_output, "output": b_output,
        })
    gr.Markdown("---")

    # ββ BOTTOM PANELS ββ
    with gr.Row():
        with gr.Column(scale=1):
            gr.Markdown("**Variables**")
            vars_md = gr.Markdown("No variables yet.", elem_classes=["var-panel"])
        with gr.Column(scale=1):
            gr.Markdown("**Run Log**")
            log_box = gr.Textbox(value="Empty. Run to see log.", lines=8, interactive=False,
                                 elem_classes=["output-box"])

    # ββ STATE β UI RENDER βββββββββββββββββββββββββββββββββββββββββββββββββββ
    def render_ui(state_json):
        """Project the JSON state onto every slot's components.

        Returns a flat list of gr.update() objects in EXACTLY the order of
        `all_outputs` below (23 updates per slot, then vars panel + log box).
        Keep both lists in sync when adding components.
        """
        s = json.loads(state_json)
        blocks = s["blocks"]
        updates = []
        for i in range(MAX_BLOCKS):
            if i >= len(blocks):
                # Unused slot: hide the group and reset every field.
                updates += [
                    gr.update(visible=False),  # group
                    gr.update(value=""), gr.update(value=""), gr.update(value=""),  # title, var, type_lbl
                    gr.update(visible=True), gr.update(value=""),  # row_value, value
                    gr.update(visible=False), gr.update(value=50),  # row_slider, slider
                    gr.update(value=0), gr.update(value=100),  # slider_min, slider_max
                    gr.update(visible=False), gr.update(value=""),  # row_options, options
                    gr.update(visible=False), gr.update(value=""), gr.update(value=""),  # row_ai, system, prompt
                    gr.update(visible=False), gr.update(value=""),  # row_chat, chat_system
                    gr.update(value=[]),  # chatbot
                    gr.update(value=""),  # chat_input
                    gr.update(visible=False), gr.update(choices=[], value=None),  # row_display, ref
                    gr.update(visible=False), gr.update(value=""),  # row_output, output
                ]
                continue
            b = blocks[i]
            t = b["type"]
            is_input = t in ("text-input", "select-input")
            is_slider = t == "slider-input"
            is_select = t == "select-input"
            is_ai = t in ("ai-text", "ai-transform")
            is_chat = t == "ai-chat"
            is_display = t == "display"
            has_output = t in ("ai-text", "ai-transform", "display")
            # A display block may reference any other non-display block's variable.
            var_choices = [b2["var_name"] for b2 in blocks if b2["id"] != b["id"] and b2["type"] != "display"]
            # Rebuild (user, assistant) pairs for the Chatbot component.
            chat_hist = s["chat_histories"].get(str(b["id"]), [])
            chat_pairs = []
            for j in range(0, len(chat_hist)-1, 2):
                if j+1 < len(chat_hist):
                    chat_pairs.append((chat_hist[j]["content"], chat_hist[j+1]["content"]))
            updates += [
                gr.update(visible=True),  # group
                gr.update(value=b["title"]),
                gr.update(value=b["var_name"]),
                gr.update(value=t),
                gr.update(visible=is_input), gr.update(value=b["value"]),
                gr.update(visible=is_slider), gr.update(value=b["slider_val"],
                                                        minimum=b["slider_min"],
                                                        maximum=b["slider_max"]),
                gr.update(value=b["slider_min"]), gr.update(value=b["slider_max"]),
                gr.update(visible=is_select), gr.update(value=b["options"]),
                gr.update(visible=is_ai), gr.update(value=b["system_prompt"]), gr.update(value=b["prompt"]),
                gr.update(visible=is_chat), gr.update(value=b.get("system_prompt", "")),
                gr.update(value=chat_pairs),
                gr.update(value=""),
                gr.update(visible=is_display), gr.update(choices=var_choices, value=b.get("ref_block") or None),
                gr.update(visible=has_output), gr.update(value=b["output"]),
            ]
        # vars panel + log
        updates.append(render_variables_panel(state_json))
        updates.append(render_log(state_json))
        return updates

    # Flat list of all output components, in the same order render_ui emits.
    all_outputs = []
    for r in block_rows:
        all_outputs += [
            r["group"],
            r["title"], r["var"], r["type_lbl"],
            r["row_value"], r["value"],
            r["row_slider"], r["slider"], r["slider_min"], r["slider_max"],
            r["row_options"], r["options"],
            r["row_ai"], r["system"], r["prompt"],
            r["row_chat"], r["chat_system"], r["chatbot"], r["chat_input"],
            r["row_display"], r["ref"],
            r["row_output"], r["output"],
        ]
    all_outputs += [vars_md, log_box]

    # ββ ADD BLOCK BUTTONS ββ
    def make_add_fn(block_type):
        """Factory so each button's closure captures its own block_type."""
        def fn(s):
            s = add_block(s, block_type)
            return [s] + render_ui(s)
        return fn

    for btn, btype in [
        (btn_add_text, "text-input"),
        (btn_add_select, "select-input"),
        (btn_add_slider, "slider-input"),
        (btn_add_ai, "ai-text"),
        (btn_add_chat, "ai-chat"),
        (btn_add_transform, "ai-transform"),
        (btn_add_display, "display"),
    ]:
        btn.click(make_add_fn(btype), inputs=[state], outputs=[state]+all_outputs)

    # ββ RUN ALL ββ
    def run_all_fn(s):
        s = run_all_blocks(s)
        return [s] + render_ui(s)

    run_all_btn.click(run_all_fn, inputs=[state], outputs=[state]+all_outputs)

    # ββ PER-BLOCK: title, var, value, system, prompt, options, ref, slider changes ββ
    # Each factory takes `slot` as an argument so the inner closure binds the
    # current loop index (avoids Python's late-binding-closure pitfall).
    for i, r in enumerate(block_rows):
        def make_field_fn(slot):
            def fn(s, val):
                parsed = json.loads(s)
                if slot >= len(parsed["blocks"]):
                    return s
                b = parsed["blocks"][slot]
                b["title"] = val
                return json.dumps(parsed)
            return fn
        def make_var_fn(slot):
            def fn(s, val):
                parsed = json.loads(s)
                if slot >= len(parsed["blocks"]):
                    return s
                parsed["blocks"][slot]["var_name"] = val
                return json.dumps(parsed)
            return fn
        def make_value_fn(slot):
            def fn(s, val):
                parsed = json.loads(s)
                if slot >= len(parsed["blocks"]):
                    return s
                parsed["blocks"][slot]["value"] = val
                return json.dumps(parsed)
            return fn
        def make_system_fn(slot):
            def fn(s, val):
                parsed = json.loads(s)
                if slot >= len(parsed["blocks"]):
                    return s
                parsed["blocks"][slot]["system_prompt"] = val
                return json.dumps(parsed)
            return fn
        def make_prompt_fn(slot):
            def fn(s, val):
                parsed = json.loads(s)
                if slot >= len(parsed["blocks"]):
                    return s
                parsed["blocks"][slot]["prompt"] = val
                return json.dumps(parsed)
            return fn
        def make_options_fn(slot):
            def fn(s, val):
                parsed = json.loads(s)
                if slot >= len(parsed["blocks"]):
                    return s
                parsed["blocks"][slot]["options"] = val
                return json.dumps(parsed)
            return fn
        def make_ref_fn(slot):
            def fn(s, val):
                parsed = json.loads(s)
                if slot >= len(parsed["blocks"]):
                    return s
                parsed["blocks"][slot]["ref_block"] = val or ""
                return json.dumps(parsed)
            return fn
        def make_slider_fn(slot):
            def fn(s, val):
                parsed = json.loads(s)
                if slot >= len(parsed["blocks"]):
                    return s
                parsed["blocks"][slot]["slider_val"] = val
                # Sliders publish their variable immediately, not just on Run.
                parsed["variables"][parsed["blocks"][slot]["var_name"]] = str(val)
                return json.dumps(parsed)
            return fn
        r["title"].change(make_field_fn(i), inputs=[state, r["title"]], outputs=[state])
        r["var"].change(make_var_fn(i), inputs=[state, r["var"]], outputs=[state])
        r["value"].change(make_value_fn(i), inputs=[state, r["value"]], outputs=[state])
        r["system"].change(make_system_fn(i), inputs=[state, r["system"]], outputs=[state])
        r["prompt"].change(make_prompt_fn(i), inputs=[state, r["prompt"]], outputs=[state])
        r["options"].change(make_options_fn(i), inputs=[state, r["options"]], outputs=[state])
        r["ref"].change(make_ref_fn(i), inputs=[state, r["ref"]], outputs=[state])
        r["slider"].change(make_slider_fn(i), inputs=[state, r["slider"]], outputs=[state])

        # ββ RUN SINGLE BLOCK ββ
        def make_run_fn(slot):
            def fn(s):
                parsed = json.loads(s)
                if slot >= len(parsed["blocks"]):
                    return [s] + render_ui(s)
                bid = parsed["blocks"][slot]["id"]
                s = run_single_block(s, bid)
                return [s] + render_ui(s)
            return fn
        r["run"].click(make_run_fn(i), inputs=[state], outputs=[state]+all_outputs)

        # ββ DELETE BLOCK ββ
        def make_del_fn(slot):
            def fn(s):
                parsed = json.loads(s)
                if slot >= len(parsed["blocks"]):
                    return [s] + render_ui(s)
                bid = parsed["blocks"][slot]["id"]
                s = delete_block(s, bid)
                return [s] + render_ui(s)
            return fn
        r["del"].click(make_del_fn(i), inputs=[state], outputs=[state]+all_outputs)

        # ββ CHAT SEND ββ
        def make_chat_fn(slot):
            def fn(s, msg):
                parsed = json.loads(s)
                if slot >= len(parsed["blocks"]):
                    return [s] + render_ui(s)
                bid = parsed["blocks"][slot]["id"]
                s, _ = send_chat(s, bid, msg)
                return [s] + render_ui(s)
            return fn
        r["chat_send"].click(make_chat_fn(i), inputs=[state, r["chat_input"]], outputs=[state]+all_outputs)
        r["chat_input"].submit(make_chat_fn(i), inputs=[state, r["chat_input"]], outputs=[state]+all_outputs)
# Fix: Blocks.launch() has no `css` parameter — passing one raises TypeError
# at startup. Custom CSS belongs on the gr.Blocks(...) constructor instead.
demo.launch(server_name="0.0.0.0", server_port=7860, ssr_mode=False)