ChaoticEconomist committed on
Commit
2e5ba5f
Β·
verified Β·
1 Parent(s): 1132571

Upload 3 files

Browse files
Files changed (3) hide show
  1. app.py +566 -0
  2. readme.md +117 -0
  3. requirements.txt +2 -0
app.py ADDED
@@ -0,0 +1,566 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import json
import os
from typing import Optional

import gradio as gr
import requests
5
+
6
# ── API CONFIG ──────────────────────────────────────────────────────────────
# The key is injected via the Space's "Variables and secrets" settings;
# the URL is xAI's OpenAI-compatible chat-completions endpoint.
API_KEY = os.getenv("XAI_API_KEY")
API_URL = "https://api.x.ai/v1/chat/completions"

# Fail fast at import time so a misconfigured Space shows a clear error
# instead of failing on the first model call.
if not API_KEY:
    raise ValueError("❌ XAI_API_KEY not set in Hugging Face Secrets")
12
+
13
def call_llm(system_prompt: str, user_prompt: str, history: Optional[list] = None) -> str:
    """Call the xAI chat-completions endpoint and return the assistant's reply.

    Args:
        system_prompt: System message; falls back to a generic assistant
            prompt when empty or None.
        user_prompt: The user message appended after any history.
        history: Optional list of prior ``{"role", "content"}`` messages
            inserted between the system message and the new user message.
            (Fixed annotation: the previous ``history: list = None`` claimed
            a list while defaulting to None.)

    Returns:
        The model's reply text, or a human-readable "❌ ..." error string.
        This function never raises, so callers can display the result
        directly in the UI.
    """
    try:
        # Build the OpenAI-style message list: system → history → new user turn.
        messages = [{"role": "system", "content": system_prompt or "You are a helpful AI assistant."}]
        if history:
            messages.extend(history)
        messages.append({"role": "user", "content": user_prompt})

        headers = {
            "Authorization": f"Bearer {API_KEY}",
            "Content-Type": "application/json",
        }
        payload = {
            "model": "grok-4-1-fast-non-reasoning",
            "messages": messages,
            "temperature": 0.7,
            "max_tokens": 1000,
        }
        response = requests.post(API_URL, headers=headers, json=payload, timeout=30)
        data = response.json()
        # API-level errors come back as JSON without "choices"; surface them verbatim.
        if "choices" not in data:
            return f"❌ API Error: {json.dumps(data)}"
        return data["choices"][0]["message"]["content"]
    except Exception as e:
        # Deliberate best-effort catch-all: network/JSON failures become a
        # displayable string rather than crashing the Gradio event handler.
        return f"❌ Request Failed: {str(e)}"
37
+
38
+
39
+ # ── BLOCK STATE ─────────────────────────────────────────────────────────────
40
+ # Each block: {id, type, title, var_name, value/options/prompt/system_prompt/...}
41
+ # Stored as JSON string in a hidden gr.State
42
+
43
def empty_state():
    """Return a brand-new builder state: no blocks, no variables, no chats."""
    return dict(blocks=[], next_id=1, variables={}, chat_histories={})
45
+
46
def add_block(state, block_type):
    """Append a new block of *block_type* to the serialized state and return it."""
    data = json.loads(state)
    new_id = data["next_id"]
    data["next_id"] = new_id + 1
    # Every block carries the full field set so later code can read any key
    # without first checking the block type.
    block = {
        "id": new_id,
        "type": block_type,
        "title": f"{block_type.replace('-', ' ').title()} {new_id}",
        "var_name": f"block_{new_id}",
    }
    block.update(
        value="",
        options="",
        prompt="",
        system_prompt="",
        slider_min=0,
        slider_max=100,
        slider_val=50,
        ref_block="",
        output="",
    )
    data["blocks"].append(block)
    return json.dumps(data)
67
+
68
def delete_block(state, bid):
    """Remove the block with id *bid* (and its chat history) from the state."""
    data = json.loads(state)
    target = int(bid)
    kept = [blk for blk in data["blocks"] if blk["id"] != target]
    data["blocks"] = kept
    # Chat histories are keyed by the stringified block id.
    data["chat_histories"].pop(str(bid), None)
    return json.dumps(data)
73
+
74
def update_block_field(state, bid, field, value):
    """Set *field* = *value* on the block whose id matches *bid*."""
    data = json.loads(state)
    wanted = int(bid)
    for blk in data["blocks"]:
        if blk["id"] != wanted:
            continue
        blk[field] = value
    return json.dumps(data)
80
+
81
def resolve_vars(template, variables):
    """Substitute every {{name}} placeholder in *template* with its value."""
    result = template
    for name, value in variables.items():
        placeholder = "{{" + name + "}}"
        result = result.replace(placeholder, str(value))
    return result
85
+
86
def collect_input_values(state):
    """Copy the current value of every input block into the variables dict."""
    data = json.loads(state)
    for blk in data["blocks"]:
        kind = blk["type"]
        if kind in ("text-input", "select-input"):
            data["variables"][blk["var_name"]] = blk["value"]
        elif kind == "slider-input":
            # Slider values are stored as strings so they splice into prompts cleanly.
            data["variables"][blk["var_name"]] = str(blk["slider_val"])
    return json.dumps(data)
95
+
96
def run_all_blocks(state):
    """Execute the whole pipeline: inputs → AI blocks → display blocks.

    Blocks run in list order, so inputs should appear before the AI blocks
    that reference them.  Returns the updated serialized state, including a
    "run_log" list of progress lines for the UI log panel.
    """
    s = json.loads(state)
    log_lines = ["β–Ά Run All started"]

    # Step 1: collect inputs into the shared variables dict.
    # (text-input and select-input were two identical branches; merged.)
    for b in s["blocks"]:
        if b["type"] in ("text-input", "select-input"):
            s["variables"][b["var_name"]] = b["value"]
            log_lines.append(f"βœ… {b['title']}: \"{b['value']}\"")
        elif b["type"] == "slider-input":
            s["variables"][b["var_name"]] = str(b["slider_val"])
            log_lines.append(f"βœ… {b['title']}: {b['slider_val']}")

    # Step 2: run AI blocks in order; each result also becomes a variable,
    # so later AI blocks can chain off earlier ones.
    for b in s["blocks"]:
        if b["type"] in ("ai-text", "ai-transform"):
            prompt = resolve_vars(b["prompt"] or "Hello", s["variables"])
            log_lines.append(f"⏳ Running {b['title']}…")
            result = call_llm(b["system_prompt"], prompt)
            b["output"] = result
            s["variables"][b["var_name"]] = result
            log_lines.append(f"βœ… {b['title']}: {len(result)} chars")

    # Step 3: display blocks mirror whichever variable they reference.
    for b in s["blocks"]:
        if b["type"] == "display":
            ref = b.get("ref_block", "")
            b["output"] = s["variables"].get(ref, "") if ref else ""

    log_lines.append("βœ… Run All complete")
    s["run_log"] = log_lines
    return json.dumps(s)
131
+
132
def run_single_block(state, bid):
    """Run exactly one block (by id) and fold its result into the state."""
    s = json.loads(state)
    target = int(bid)
    block = next((blk for blk in s["blocks"] if blk["id"] == target), None)
    if not block:
        # Unknown id: return the state unchanged.
        return json.dumps(s)

    kind = block["type"]
    if kind in ("text-input", "select-input"):
        s["variables"][block["var_name"]] = block["value"]
    elif kind == "slider-input":
        s["variables"][block["var_name"]] = str(block["slider_val"])
    elif kind in ("ai-text", "ai-transform"):
        # Fill {{placeholders}} from current variables before calling the model.
        prompt = resolve_vars(block["prompt"] or "Hello", s["variables"])
        reply = call_llm(block["system_prompt"], prompt)
        block["output"] = reply
        s["variables"][block["var_name"]] = reply
    elif kind == "display":
        ref = block.get("ref_block", "")
        block["output"] = s["variables"].get(ref, "") if ref else ""

    return json.dumps(s)
153
+
154
def send_chat(state, bid, user_message):
    """Handle one chat turn for an ai-chat block.

    Returns (updated_state_json, "") — the empty string clears the input box.
    """
    s = json.loads(state)
    key = str(bid)
    block = next((blk for blk in s["blocks"] if blk["id"] == int(bid)), None)

    # Ignore sends for missing blocks or blank messages.
    if not block or not user_message.strip():
        return json.dumps(s), ""

    # Placeholders like {{topic}} work inside chat messages too.
    msg = resolve_vars(user_message.strip(), s["variables"])

    history = s["chat_histories"].setdefault(key, [])
    history.append({"role": "user", "content": msg})

    reply = call_llm(
        block["system_prompt"] or "You are a helpful assistant.",
        msg,
        history=history[:-1],  # prior turns only; msg itself is passed separately
    )

    history.append({"role": "assistant", "content": reply})
    block["output"] = reply
    s["variables"][block["var_name"]] = reply

    return json.dumps(s), ""
179
+
180
+
181
+ # ── RENDER HELPERS ──────────────────────────────────────────────────────────
182
def render_variables_panel(state):
    """Render the variables dict as markdown name/preview pairs for the UI."""
    data = json.loads(state)
    if not data["variables"]:
        return "No variables yet. Run your app to see values."
    entries = []
    for name, value in data["variables"].items():
        text = str(value)
        # Truncate long values so the panel stays readable.
        preview = text[:120] + ("…" if len(text) > 120 else "")
        entries.append(f"**`{{{{{name}}}}}`**\n{preview}")
    return "\n\n---\n\n".join(entries)
191
+
192
def render_log(state):
    """Return the run log as plain text, or a placeholder when empty."""
    entries = json.loads(state).get("run_log", [])
    if not entries:
        return "Empty. Run to see log."
    return "\n".join(entries)
196
+
197
def get_block_output(state, bid):
    """Return the stored output of block *bid*, or "" if it doesn't exist."""
    wanted = int(bid)
    for blk in json.loads(state)["blocks"]:
        if blk["id"] == wanted:
            return blk["output"]
    return ""
201
+
202
def get_chat_history(state, bid):
    """Convert a block's stored chat history into (user, assistant) tuples."""
    data = json.loads(state)
    hist = data["chat_histories"].get(str(bid), [])
    pairs = []
    # Messages alternate user/assistant; pair them up, dropping any trailing
    # unanswered user message.
    idx = 0
    while idx + 1 < len(hist):
        pairs.append((hist[idx]["content"], hist[idx + 1]["content"]))
        idx += 2
    return pairs
210
+
211
def get_var_names(state):
    """List the variable names of every block except pure display blocks."""
    names = []
    for blk in json.loads(state)["blocks"]:
        if blk["type"] == "display":
            continue
        names.append(blk["var_name"])
    return names
214
+
215
+
216
# ── UI ───────────────────────────────────────────────────────────────────────
# Fixed-slot UI: MAX_BLOCKS component groups are created up front and the
# JSON state decides which are visible and what they show.  The flat update
# list produced by render_ui() MUST stay positionally aligned with
# all_outputs below — edit both together.
CSS = """
#app-title {font-size: 26px; font-weight: 800; letter-spacing: -0.5px; margin-bottom: 4px;}
#app-subtitle {color: #888; font-size: 13px; margin-bottom: 20px;}
.block-card {border: 1px solid #2a2a36; border-radius: 12px; padding: 16px; margin-bottom: 14px; background: #16161a;}
.block-card-header {font-weight: 700; font-size: 14px; margin-bottom: 10px; display: flex; align-items: center; gap: 8px;}
.run-btn {background: #34d399 !important; color: #0a2318 !important; font-weight: 700 !important;}
.add-btn {background: #7c6fff !important; color: white !important;}
.output-box textarea {font-family: 'Courier New', monospace !important; font-size: 12px !important;}
.var-panel {font-size: 12px; line-height: 1.8;}
"""

with gr.Blocks(css=CSS, title="GrokFlow β€” AI App Builder") as demo:

    # Single source of truth: the whole builder state lives in this JSON string.
    state = gr.State(json.dumps(empty_state()))

    # ── HEADER ──
    with gr.Row():
        with gr.Column():
            gr.HTML('<div id="app-title">⚑ GrokFlow</div><div id="app-subtitle">Build AI apps with blocks β€” powered by xAI Grok</div>')
        with gr.Column(scale=0, min_width=120):
            run_all_btn = gr.Button("β–Ά Run All", variant="primary", elem_classes=["run-btn"])

    gr.Markdown("---")

    # ── ADD BLOCK ROW ──
    with gr.Row():
        gr.Markdown("**Add a block:**")
        btn_add_text = gr.Button("πŸ“ Text Input", size="sm")
        btn_add_select = gr.Button("☰ Dropdown", size="sm")
        btn_add_slider = gr.Button("β—‰ Slider", size="sm")
        btn_add_ai = gr.Button("✦ AI Generate", size="sm", elem_classes=["add-btn"])
        btn_add_chat = gr.Button("πŸ’¬ AI Chat", size="sm", elem_classes=["add-btn"])
        btn_add_transform = gr.Button("⟳ AI Transform", size="sm", elem_classes=["add-btn"])
        btn_add_display = gr.Button("β–¦ Display", size="sm")

    gr.Markdown("---")

    # ── DYNAMIC BLOCKS AREA ──
    # We render up to 10 blocks. Each slot is hidden until a block occupies it.
    # State drives visibility and values.

    MAX_BLOCKS = 10
    block_rows = []

    for i in range(MAX_BLOCKS):
        with gr.Group(visible=False) as grp:
            with gr.Row():
                b_title = gr.Textbox(label="Block title", scale=3, interactive=True)
                b_var = gr.Textbox(label="Variable name", scale=2, interactive=True)
                b_type_lbl = gr.Textbox(label="Type", scale=1, interactive=False)
                b_run = gr.Button("β–Ά", size="sm", scale=0, min_width=40)
                b_del = gr.Button("βœ•", size="sm", scale=0, min_width=40, variant="stop")

            # Input-specific
            with gr.Row(visible=True) as row_value:
                b_value = gr.Textbox(label="Value / Placeholder", scale=1, interactive=True)

            # Slider-specific
            with gr.Row(visible=False) as row_slider:
                b_slider = gr.Slider(minimum=0, maximum=100, value=50, label="Value", interactive=True)
                b_slider_min = gr.Number(value=0, label="Min", precision=0, interactive=True, scale=0, min_width=80)
                b_slider_max = gr.Number(value=100, label="Max", precision=0, interactive=True, scale=0, min_width=80)

            # Select options
            with gr.Row(visible=False) as row_options:
                b_options = gr.Textbox(label="Options (one per line)", lines=4, interactive=True)

            # AI-specific
            with gr.Row(visible=False) as row_ai:
                with gr.Column():
                    b_system = gr.Textbox(label="System prompt (optional)", interactive=True)
                    b_prompt = gr.Textbox(label="Prompt β€” use {{var_name}} to reference blocks", lines=4, interactive=True)

            # Chat
            with gr.Row(visible=False) as row_chat:
                with gr.Column():
                    b_chat_system = gr.Textbox(label="System prompt", interactive=True)
                    b_chatbot = gr.Chatbot(label="Chat", height=200)
                    with gr.Row():
                        b_chat_input = gr.Textbox(label="Message", scale=4, interactive=True)
                        b_chat_send = gr.Button("Send", scale=1, size="sm")

            # Display ref
            with gr.Row(visible=False) as row_display:
                b_ref = gr.Dropdown(label="Show variable", choices=[], interactive=True)

            # Output
            with gr.Row(visible=False) as row_output:
                b_output = gr.Textbox(label="Output", lines=4, interactive=False, elem_classes=["output-box"])

        # Keep a handle to every component in this slot so render_ui can
        # drive it; dict key order here is documentation only — ordering
        # that matters is the all_outputs list below.
        block_rows.append({
            "group": grp,
            "title": b_title, "var": b_var, "type_lbl": b_type_lbl,
            "run": b_run, "del": b_del,
            "row_value": row_value, "value": b_value,
            "row_slider": row_slider, "slider": b_slider, "slider_min": b_slider_min, "slider_max": b_slider_max,
            "row_options": row_options, "options": b_options,
            "row_ai": row_ai, "system": b_system, "prompt": b_prompt,
            "row_chat": row_chat, "chat_system": b_chat_system, "chatbot": b_chatbot,
            "chat_input": b_chat_input, "chat_send": b_chat_send,
            "row_display": row_display, "ref": b_ref,
            "row_output": row_output, "output": b_output,
        })

    gr.Markdown("---")

    # ── BOTTOM PANELS ──
    with gr.Row():
        with gr.Column(scale=1):
            gr.Markdown("**Variables**")
            vars_md = gr.Markdown("No variables yet.", elem_classes=["var-panel"])
        with gr.Column(scale=1):
            gr.Markdown("**Run Log**")
            log_box = gr.Textbox(value="Empty. Run to see log.", lines=8, interactive=False,
                                 elem_classes=["output-box"])

    # ── STATE β†’ UI RENDER ───────────────────────────────────────────────────
    def render_ui(state_json):
        # Produce one gr.update per component in all_outputs, in the same
        # order: 23 updates per slot, then vars panel, then log box.
        s = json.loads(state_json)
        blocks = s["blocks"]
        updates = []

        for i in range(MAX_BLOCKS):
            if i >= len(blocks):
                # Hide this slot and reset its widgets to defaults.
                updates += [
                    gr.update(visible=False),  # group
                    gr.update(value=""), gr.update(value=""), gr.update(value=""),  # title, var, type_lbl
                    gr.update(visible=True), gr.update(value=""),  # row_value, value
                    gr.update(visible=False), gr.update(value=50),  # row_slider, slider
                    gr.update(value=0), gr.update(value=100),  # slider_min, slider_max
                    gr.update(visible=False), gr.update(value=""),  # row_options, options
                    gr.update(visible=False), gr.update(value=""), gr.update(value=""),  # row_ai, system, prompt
                    gr.update(visible=False), gr.update(value=""),  # row_chat, chat_system
                    gr.update(value=[]),  # chatbot
                    gr.update(value=""),  # chat_input
                    gr.update(visible=False), gr.update(choices=[], value=None),  # row_display, ref
                    gr.update(visible=False), gr.update(value=""),  # row_output, output
                ]
                continue

            b = blocks[i]
            t = b["type"]
            # Which sub-rows this block type shows.
            is_input = t in ("text-input", "select-input")
            is_slider = t == "slider-input"
            is_select = t == "select-input"
            is_ai = t in ("ai-text", "ai-transform")
            is_chat = t == "ai-chat"
            is_display = t == "display"
            has_output = t in ("ai-text", "ai-transform", "display")

            # A display block may reference any other non-display block.
            var_choices = [b2["var_name"] for b2 in blocks if b2["id"] != b["id"] and b2["type"] != "display"]
            # Rebuild (user, assistant) pairs for the Chatbot widget.
            chat_hist = s["chat_histories"].get(str(b["id"]), [])
            chat_pairs = []
            for j in range(0, len(chat_hist) - 1, 2):
                if j + 1 < len(chat_hist):
                    chat_pairs.append((chat_hist[j]["content"], chat_hist[j + 1]["content"]))

            updates += [
                gr.update(visible=True),  # group
                gr.update(value=b["title"]),
                gr.update(value=b["var_name"]),
                gr.update(value=t),
                gr.update(visible=is_input), gr.update(value=b["value"]),
                gr.update(visible=is_slider), gr.update(value=b["slider_val"],
                                                        minimum=b["slider_min"],
                                                        maximum=b["slider_max"]),
                gr.update(value=b["slider_min"]), gr.update(value=b["slider_max"]),
                gr.update(visible=is_select), gr.update(value=b["options"]),
                gr.update(visible=is_ai), gr.update(value=b["system_prompt"]), gr.update(value=b["prompt"]),
                gr.update(visible=is_chat), gr.update(value=b.get("system_prompt", "")),
                gr.update(value=chat_pairs),
                gr.update(value=""),
                gr.update(visible=is_display), gr.update(choices=var_choices, value=b.get("ref_block") or None),
                gr.update(visible=has_output), gr.update(value=b["output"]),
            ]

        # vars panel + log
        updates.append(render_variables_panel(state_json))
        updates.append(render_log(state_json))
        return updates

    # Flat list of all outputs for render_ui — must match the update order above.
    all_outputs = []
    for r in block_rows:
        all_outputs += [
            r["group"],
            r["title"], r["var"], r["type_lbl"],
            r["row_value"], r["value"],
            r["row_slider"], r["slider"], r["slider_min"], r["slider_max"],
            r["row_options"], r["options"],
            r["row_ai"], r["system"], r["prompt"],
            r["row_chat"], r["chat_system"], r["chatbot"], r["chat_input"],
            r["row_display"], r["ref"],
            r["row_output"], r["output"],
        ]
    all_outputs += [vars_md, log_box]

    # ── ADD BLOCK BUTTONS ──
    def make_add_fn(block_type):
        # Factory binds block_type at definition time (avoids the
        # late-binding-closure pitfall in the loop below).
        def fn(s):
            s = add_block(s, block_type)
            return [s] + render_ui(s)
        return fn

    for btn, btype in [
        (btn_add_text, "text-input"),
        (btn_add_select, "select-input"),
        (btn_add_slider, "slider-input"),
        (btn_add_ai, "ai-text"),
        (btn_add_chat, "ai-chat"),
        (btn_add_transform, "ai-transform"),
        (btn_add_display, "display"),
    ]:
        btn.click(make_add_fn(btype), inputs=[state], outputs=[state] + all_outputs)

    # ── RUN ALL ──
    def run_all_fn(s):
        s = run_all_blocks(s)
        return [s] + render_ui(s)

    run_all_btn.click(run_all_fn, inputs=[state], outputs=[state] + all_outputs)

    # ── PER-BLOCK: title, var, value, system, prompt, options, ref, slider changes ──
    # Each make_*_fn is a factory that captures the slot index so the change
    # handler writes to the right block; handlers no-op if the slot is empty.
    for i, r in enumerate(block_rows):

        def make_field_fn(slot):
            def fn(s, val):
                parsed = json.loads(s)
                if slot >= len(parsed["blocks"]):
                    return s
                b = parsed["blocks"][slot]
                b["title"] = val
                return json.dumps(parsed)
            return fn

        def make_var_fn(slot):
            def fn(s, val):
                parsed = json.loads(s)
                if slot >= len(parsed["blocks"]):
                    return s
                parsed["blocks"][slot]["var_name"] = val
                return json.dumps(parsed)
            return fn

        def make_value_fn(slot):
            def fn(s, val):
                parsed = json.loads(s)
                if slot >= len(parsed["blocks"]):
                    return s
                parsed["blocks"][slot]["value"] = val
                return json.dumps(parsed)
            return fn

        def make_system_fn(slot):
            def fn(s, val):
                parsed = json.loads(s)
                if slot >= len(parsed["blocks"]):
                    return s
                parsed["blocks"][slot]["system_prompt"] = val
                return json.dumps(parsed)
            return fn

        def make_prompt_fn(slot):
            def fn(s, val):
                parsed = json.loads(s)
                if slot >= len(parsed["blocks"]):
                    return s
                parsed["blocks"][slot]["prompt"] = val
                return json.dumps(parsed)
            return fn

        def make_options_fn(slot):
            def fn(s, val):
                parsed = json.loads(s)
                if slot >= len(parsed["blocks"]):
                    return s
                parsed["blocks"][slot]["options"] = val
                return json.dumps(parsed)
            return fn

        def make_ref_fn(slot):
            def fn(s, val):
                parsed = json.loads(s)
                if slot >= len(parsed["blocks"]):
                    return s
                parsed["blocks"][slot]["ref_block"] = val or ""
                return json.dumps(parsed)
            return fn

        def make_slider_fn(slot):
            def fn(s, val):
                parsed = json.loads(s)
                if slot >= len(parsed["blocks"]):
                    return s
                parsed["blocks"][slot]["slider_val"] = val
                # Sliders update the variables dict immediately on change.
                parsed["variables"][parsed["blocks"][slot]["var_name"]] = str(val)
                return json.dumps(parsed)
            return fn

        r["title"].change(make_field_fn(i), inputs=[state, r["title"]], outputs=[state])
        r["var"].change(make_var_fn(i), inputs=[state, r["var"]], outputs=[state])
        r["value"].change(make_value_fn(i), inputs=[state, r["value"]], outputs=[state])
        r["system"].change(make_system_fn(i), inputs=[state, r["system"]], outputs=[state])
        r["prompt"].change(make_prompt_fn(i), inputs=[state, r["prompt"]], outputs=[state])
        r["options"].change(make_options_fn(i), inputs=[state, r["options"]], outputs=[state])
        r["ref"].change(make_ref_fn(i), inputs=[state, r["ref"]], outputs=[state])
        r["slider"].change(make_slider_fn(i), inputs=[state, r["slider"]], outputs=[state])

        # ── RUN SINGLE BLOCK ──
        def make_run_fn(slot):
            def fn(s):
                parsed = json.loads(s)
                if slot >= len(parsed["blocks"]):
                    return [s] + render_ui(s)
                bid = parsed["blocks"][slot]["id"]
                s = run_single_block(s, bid)
                return [s] + render_ui(s)
            return fn

        r["run"].click(make_run_fn(i), inputs=[state], outputs=[state] + all_outputs)

        # ── DELETE BLOCK ──
        def make_del_fn(slot):
            def fn(s):
                parsed = json.loads(s)
                if slot >= len(parsed["blocks"]):
                    return [s] + render_ui(s)
                bid = parsed["blocks"][slot]["id"]
                s = delete_block(s, bid)
                return [s] + render_ui(s)
            return fn

        r["del"].click(make_del_fn(i), inputs=[state], outputs=[state] + all_outputs)

        # ── CHAT SEND ──
        def make_chat_fn(slot):
            def fn(s, msg):
                parsed = json.loads(s)
                if slot >= len(parsed["blocks"]):
                    return [s] + render_ui(s)
                bid = parsed["blocks"][slot]["id"]
                s, _ = send_chat(s, bid, msg)
                return [s] + render_ui(s)
            return fn

        r["chat_send"].click(make_chat_fn(i), inputs=[state, r["chat_input"]], outputs=[state] + all_outputs)
        r["chat_input"].submit(make_chat_fn(i), inputs=[state, r["chat_input"]], outputs=[state] + all_outputs)

demo.launch()
readme.md ADDED
@@ -0,0 +1,117 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # ⚑ Grokflow
2
+
3
+ **Build AI-powered apps visually β€” no code needed.**
4
+
5
+ Grokflow is a block-based AI app builder that runs on Hugging Face Spaces, powered by xAI's Grok model. Snap together input blocks, AI blocks, and output blocks to create custom AI workflows in minutes.
6
+
7
+ ---
8
+
9
+ ## What it does
10
+
11
+ Grokflow lets you build AI apps the same way you'd build with Lego β€” one block at a time. Connect blocks using `{{variable}}` references and hit **Run All** to execute the whole pipeline.
12
+
13
+ ![Grokflow screenshot](screenshot.png)
14
+
15
+ ---
16
+
17
+ ## Block types
18
+
19
+ | Block | Type | Description |
20
+ |---|---|---|
21
+ | πŸ“ Text Input | Input | Free-form text the user fills in |
22
+ | ☰ Dropdown | Input | Choose from a list of options |
23
+ | β—‰ Slider | Input | Numeric value picker with configurable min/max |
24
+ | ✦ AI Generate | AI | Generate text from a prompt template |
25
+ | πŸ’¬ AI Chat | AI | Full multi-turn conversation with Grok |
26
+ | ⟳ AI Transform | AI | Rewrite or edit text with a prompt |
27
+ | β–¦ Display | Output | Show the value of any variable |
28
+
29
+ ---
30
+
31
+ ## How to use
32
+
33
+ ### 1. Add blocks
34
+ Click any block button in the toolbar to add it to your canvas.
35
+
36
+ ### 2. Name your variables
37
+ Each block has a **variable name** (e.g. `block_1`). This is how other blocks reference its value.
38
+
39
+ ### 3. Connect blocks
40
+ In any AI prompt, use `{{variable_name}}` to inject another block's value. For example:
41
+
42
+ ```
43
+ Write a {{tone}} blog post about {{topic}}.
44
+ ```
45
+
46
+ Where `tone` and `topic` are variable names from your input blocks.
47
+
48
+ ### 4. Run
49
+ - Click **β–Ά** on a single block to run just that block
50
+ - Click **β–Ά Run All** to execute all blocks in order (inputs β†’ AI β†’ display)
51
+
52
+ ---
53
+
54
+ ## Example app: Blog Post Generator
55
+
56
+ | Block | Type | Config |
57
+ |---|---|---|
58
+ | Topic | Text Input | Variable: `topic` |
59
+ | Tone | Dropdown | Options: Professional, Casual, Funny Β· Variable: `tone` |
60
+ | Writer | AI Generate | Prompt: `Write a {{tone}} 3-paragraph blog post about {{topic}}.` |
61
+ | Result | Display | Shows: `{{writer}}` |
62
+
63
+ Hit **Run All** β†’ instant blog post.
64
+
65
+ ---
66
+
67
+ ## Setup on Hugging Face Spaces
68
+
69
+ ### Files needed
70
+ ```
71
+ your-space/
72
+ β”œβ”€β”€ app.py
73
+ └── requirements.txt
74
+ ```
75
+
76
+ ### Add your API key
77
+ 1. Go to your Space β†’ **Settings β†’ Variables and secrets**
78
+ 2. Add a new secret:
79
+ - Name: `XAI_API_KEY`
80
+ - Value: your xAI API key from [console.x.ai](https://console.x.ai)
81
+
82
+ ### Deploy
83
+ Push both files β€” the Space builds automatically. Takes about 60 seconds.
84
+
85
+ ---
86
+
87
+ ## Model
88
+
89
+ Grokflow uses **xAI Grok** via the OpenAI-compatible API endpoint:
90
+
91
+ ```
92
+ https://api.x.ai/v1/chat/completions
93
+ model: grok-4-1-fast-non-reasoning
94
+ ```
95
+
96
+ ---
97
+
98
+ ## Tech stack
99
+
100
+ - **[Gradio](https://gradio.app)** β€” UI framework
101
+ - **[xAI Grok API](https://console.x.ai)** β€” language model
102
+ - **Python** β€” all logic, no JavaScript, no HTML files
103
+
104
+ ---
105
+
106
+ ## Tips
107
+
108
+ - Variable names must be a single word with no spaces (e.g. `my_topic` not `my topic`)
109
+ - AI blocks run in the order they appear β€” put inputs first, AI blocks second, display blocks last
110
+ - The AI Chat block keeps full conversation history within a session
111
+ - Use the system prompt field to give each AI block a specific persona or set of instructions
112
+
113
+ ---
114
+
115
+ ## License
116
+
117
+ MIT β€” free to use, fork, and build on.
requirements.txt ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ gradio
2
+ requests