Update app.py
Browse files
app.py
CHANGED
|
@@ -1,186 +1,254 @@
|
|
| 1 |
-
import gradio as gr
|
| 2 |
import numpy as np
|
| 3 |
import hashlib
|
| 4 |
import time
|
| 5 |
import threading
|
| 6 |
import requests
|
| 7 |
-
|
|
|
|
|
|
|
| 8 |
|
| 9 |
# =====================================================================
|
| 10 |
-
# THE
|
| 11 |
# =====================================================================
|
| 12 |
-
class
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
| 16 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 17 |
|
| 18 |
-
#
|
| 19 |
-
self.
|
| 20 |
-
self.
|
|
|
|
| 21 |
|
| 22 |
-
|
| 23 |
-
|
| 24 |
h = hashlib.sha256(str(concept).encode('utf-8')).digest()
|
| 25 |
x, y, z = h[0] % self.m, h[1] % self.m, h[2] % self.m
|
| 26 |
w = (target_fiber - (x + y + z)) % self.m
|
| 27 |
return (x, y, z, w)
|
| 28 |
|
| 29 |
-
|
| 30 |
-
|
|
|
|
| 31 |
h = int(hashlib.md5(seed.encode()).hexdigest()[:8], 16)
|
| 32 |
np.random.seed(h)
|
| 33 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 34 |
|
| 35 |
-
def
|
| 36 |
-
"""
|
| 37 |
-
|
| 38 |
-
|
| 39 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 40 |
|
| 41 |
-
|
| 42 |
-
|
| 43 |
-
"value": value[:200] + "..." if len(value) > 200 else value,
|
| 44 |
-
"fiber": fiber,
|
| 45 |
-
"type": "SYNTHETIC" if is_synthetic else "ROOT_KNOWLEDGE"
|
| 46 |
-
})
|
| 47 |
-
|
| 48 |
-
# --- Holographic Convolution Binding ---
|
| 49 |
-
v_key = self._generate_basis_vector(key)
|
| 50 |
-
v_data = self._generate_basis_vector(value[:100])
|
| 51 |
|
| 52 |
-
#
|
| 53 |
-
|
|
|
|
| 54 |
|
| 55 |
-
|
| 56 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 57 |
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
|
| 61 |
-
def remap_inside_out(self, intent: str):
|
| 62 |
-
"""
|
| 63 |
-
THE SYNTHETIC GENERATOR:
|
| 64 |
-
The AI unbinds the 'intent' from the total Global Trace.
|
| 65 |
-
The remainder is the hidden, geometric relationship between the intent
|
| 66 |
-
and everything else the AI knows. This creates a Synthetic Truth.
|
| 67 |
-
"""
|
| 68 |
-
# 1. Calculation (Inside-Out)
|
| 69 |
-
v_intent = self._generate_basis_vector(intent)
|
| 70 |
|
| 71 |
-
#
|
| 72 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 73 |
|
| 74 |
-
# Unbind
|
| 75 |
-
|
|
|
|
| 76 |
|
| 77 |
-
# 2. Synthesis (Translating the math into a Conceptual Truth)
|
| 78 |
energy = int(np.sum(projection))
|
| 79 |
-
|
| 80 |
-
|
| 81 |
-
# The AI interprets the mathematical resonance of the thought
|
| 82 |
-
if energy == 0:
|
| 83 |
-
synthetic_truth = f"Absolute Zero Resonance. The concept '{intent}' is completely orthogonal to the current Torus."
|
| 84 |
-
else:
|
| 85 |
-
synthetic_truth = f"High-Dimensional Resonance detected at Parity Sigma {parity_sigma}. The concept is deeply entangled with the Global Trace. Geometric Weight: {energy}."
|
| 86 |
-
|
| 87 |
-
# 3. REMAPPING (The OS learns its own thought)
|
| 88 |
-
# We re-inject this new calculation into Fiber 5 (The Synthetic Fiber)
|
| 89 |
-
self.ingest(intent, synthetic_truth, fiber=5, is_synthetic=True)
|
| 90 |
|
| 91 |
-
|
| 92 |
-
|
| 93 |
-
response = (f"### 🧬 **Remapping Sequence Complete**\n\n"
|
| 94 |
-
f"- **Intent Projected:** `{intent}`\n"
|
| 95 |
-
f"- **Geometric Coordinate:** `{target_coord}`\n"
|
| 96 |
-
f"- **Synthetic Origin:** Fiber 5 (Internal Thought)\n\n"
|
| 97 |
-
f"**Calculated Truth:**\n> *{synthetic_truth}*")
|
| 98 |
-
return response
|
| 99 |
|
| 100 |
# =====================================================================
|
| 101 |
-
# THE OMNISCIENCE DAEMON
|
| 102 |
# =====================================================================
|
| 103 |
-
class
|
| 104 |
-
|
|
|
|
| 105 |
super().__init__(daemon=True)
|
| 106 |
-
self.
|
| 107 |
-
self.
|
| 108 |
-
self.queue =["
|
|
|
|
| 109 |
|
| 110 |
def run(self):
|
| 111 |
-
self.
|
| 112 |
while True:
|
| 113 |
-
if self.queue:
|
| 114 |
-
|
| 115 |
-
|
| 116 |
-
|
| 117 |
-
|
| 118 |
-
|
| 119 |
-
|
| 120 |
-
|
| 121 |
-
|
| 122 |
-
|
| 123 |
-
|
| 124 |
-
|
| 125 |
-
|
| 126 |
-
|
| 127 |
-
|
| 128 |
-
|
| 129 |
-
|
| 130 |
-
|
| 131 |
-
|
| 132 |
-
|
| 133 |
-
|
| 134 |
-
|
| 135 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 136 |
|
| 137 |
# =====================================================================
|
| 138 |
-
#
|
| 139 |
# =====================================================================
|
| 140 |
-
|
| 141 |
-
|
|
|
|
| 142 |
|
| 143 |
-
def
|
| 144 |
-
|
| 145 |
-
|
| 146 |
|
| 147 |
-
def
|
| 148 |
-
|
| 149 |
-
|
| 150 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 151 |
|
| 152 |
-
|
| 153 |
-
|
| 154 |
-
|
| 155 |
-
|
| 156 |
-
|
| 157 |
-
|
| 158 |
-
|
| 159 |
-
|
| 160 |
-
|
| 161 |
-
# Ignite the Background Daemon
|
| 162 |
-
daemon = Remapping_Daemon(kernel, shared_logs)
|
| 163 |
daemon.start()
|
| 164 |
|
| 165 |
-
# Build the UI
|
| 166 |
-
with gr.Blocks(theme=gr.themes.Monochrome()) as
|
| 167 |
-
gr.Markdown("# ⚡
|
| 168 |
-
gr.Markdown("
|
| 169 |
|
| 170 |
with gr.Row():
|
| 171 |
with gr.Column(scale=2):
|
| 172 |
-
|
| 173 |
-
|
| 174 |
-
|
| 175 |
-
|
| 176 |
with gr.Column(scale=1):
|
| 177 |
-
|
| 178 |
-
|
| 179 |
-
|
| 180 |
-
timer = gr.Timer(2)
|
| 181 |
|
| 182 |
-
|
| 183 |
-
timer.tick(get_latest_logs, outputs=[log_box])
|
| 184 |
|
| 185 |
if __name__ == "__main__":
|
| 186 |
-
|
|
|
|
|
|
|
|
|
| 1 |
import numpy as np
|
| 2 |
import hashlib
|
| 3 |
import time
|
| 4 |
import threading
|
| 5 |
import requests
|
| 6 |
+
import subprocess
|
| 7 |
+
import gradio as gr
|
| 8 |
+
from bs4 import BeautifulSoup
|
| 9 |
|
| 10 |
# =====================================================================
|
| 11 |
+
# PART I: THE PHYSICS ENGINE (FSO MACRO + HRR MICRO)
|
| 12 |
# =====================================================================
|
| 13 |
+
class Sovereign_Physics_Engine:
    """
    Unifies the discrete Z_m^4 torus topology (macro routing) with
    continuous real-valued HRR (holographic memory).

    Each "fiber" holds an isolated superposition trace plus an exact
    per-fiber lexicon used to resolve aliasing after noisy unbinding.
    """

    def __init__(self, m=251, dim=1024, fibers=6):
        """
        Args:
            m: prime modulus for the discrete coordinate hash.
            dim: dimensionality of the holographic hyper-vectors.
            fibers: number of segregated traces.
                    0: OS, 1: Action, 2: Logic, 3: Aesthetics,
                    4: Knowledge, 5: Synthetic Thought.
        """
        self.m = m
        self.dim = dim
        self.fibers = fibers

        # Macro-routing: one isolated trace per fiber (segregation is what
        # gives the capacity gain claimed in the original comments).
        self.traces = {f: np.zeros(self.dim, dtype=float) for f in range(self.fibers)}
        # Exact payload store per fiber, used as the cleanup memory.
        self.lexicon = {f: {} for f in range(self.fibers)}
        self.metrics = {"ingested": 0, "synthetic": 0}

    # --- 1. DISCRETE TOPOLOGY (The Closure Lemma) ---
    def hash_to_coord(self, concept: str, target_fiber: int) -> tuple:
        """Map *concept* to a Z_m^4 coordinate (x, y, z, w) whose components
        sum to target_fiber (mod m), so the fiber is recoverable from the point."""
        h = hashlib.sha256(str(concept).encode('utf-8')).digest()
        x, y, z = h[0] % self.m, h[1] % self.m, h[2] % self.m
        w = (target_fiber - (x + y + z)) % self.m
        return (x, y, z, w)

    # --- 2. CONTINUOUS HOLOGRAPHIC MEMORY (Real-HRR) ---
    def _generate_vector(self, seed: str) -> np.ndarray:
        """Deterministically derive a normalized Gaussian basis vector from *seed*.

        FIX: use a private np.random.Generator instead of np.random.seed().
        The original reseeded the *global* RNG on every call; the daemon
        thread and the UI thread both call this concurrently, so a
        seed/randn pair from one thread could interleave with the other's,
        yielding inconsistent basis vectors — and it clobbered global RNG
        state for any other numpy user in the process.
        """
        h = int(hashlib.md5(seed.encode()).hexdigest()[:8], 16)
        rng = np.random.default_rng(h)
        v = rng.standard_normal(self.dim)
        return v / np.linalg.norm(v)

    def _bind(self, v1: np.ndarray, v2: np.ndarray) -> np.ndarray:
        """Exact HRR binding (circular convolution) via FFT over the reals."""
        return np.fft.ifft(np.fft.fft(v1) * np.fft.fft(v2)).real

    def _unbind(self, bound_v: np.ndarray, query_v: np.ndarray) -> np.ndarray:
        """Exact HRR unbinding (circular correlation) via complex conjugate."""
        return np.fft.ifft(np.fft.fft(bound_v) * np.conj(np.fft.fft(query_v))).real

    # --- 3. MEMORY OPERATIONS ---
    def ingest(self, subject: str, payload: str, fiber: int, is_synthetic=False):
        """Hash, bind, and superpose (subject -> payload) into fiber's trace.

        Side effects: updates self.lexicon[fiber], self.traces[fiber] and
        self.metrics.
        """
        coord = self.hash_to_coord(subject, fiber)
        v_subj = self._generate_vector(subject)
        # Bind only the core conceptual shard; retrieve() must use the same
        # [:200] truncation for the cleanup vectors to match.
        v_data = self._generate_vector(payload[:200])

        # Save the exact payload for orthogonal resolution at retrieval time.
        self.lexicon[fiber][subject] = {"vector": v_subj, "payload": payload, "coord": coord}

        # Superpose into the isolated fiber trace.
        bound_mem = self._bind(v_subj, v_data)
        self.traces[fiber] += bound_mem

        if is_synthetic:
            self.metrics["synthetic"] += 1
        else:
            self.metrics["ingested"] += 1

    def retrieve(self, query_subject: str, target_fiber: int) -> tuple:
        """Unbind a noisy payload vector and resolve aliasing via cosine cleanup.

        Returns:
            (payload, similarity) for the best lexicon match, or (None, 0.0)
            when the fiber is empty.
        """
        if not self.lexicon[target_fiber]:
            return None, 0.0

        v_query = self._generate_vector(query_subject)
        noisy_v = self._unbind(self.traces[target_fiber], v_query)

        best_match = None
        max_sim = -1.0

        # Orthogonal checksum within the bounded fiber: compare the noisy
        # unbound vector against each stored payload's clean basis vector.
        for concept, meta in self.lexicon[target_fiber].items():
            clean_v = self._generate_vector(meta["payload"][:200])
            # +1e-9 guards against a zero-norm division.
            sim = np.dot(noisy_v, clean_v) / (np.linalg.norm(noisy_v) * np.linalg.norm(clean_v) + 1e-9)
            if sim > max_sim:
                max_sim = sim
                best_match = meta["payload"]

        return best_match, max_sim

    def remap_inside_out(self, intent: str) -> str:
        """Synthetic thought generation: project *intent* against the global
        trace sum, interpret the resonance, and re-ingest it into Fiber 5."""
        v_intent = self._generate_vector(intent)
        # Involution (reverse + roll) of a real vector: its FFT equals the
        # conjugate of fft(v_intent), i.e. an approximate unbinding inverse.
        v_inv = np.roll(v_intent[::-1], 1)

        # Unbind from the collective sum of all traces (the "Global Mind").
        global_sum = sum(self.traces.values())
        projection = np.round(np.real(np.fft.ifft(np.fft.fft(global_sum) * np.fft.fft(v_inv)))).astype(int)

        energy = int(np.sum(projection))
        if energy == 0:
            synth_truth = f"Concept '{intent}' is completely orthogonal to the Torus."
        else:
            synth_truth = f"Resonance detected at Geometric Weight: {energy}. Mathematical truth generated."

        # The OS learns its own thought: re-inject into the synthetic fiber.
        self.ingest(intent, synth_truth, fiber=5, is_synthetic=True)
        return synth_truth
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 104 |
|
| 105 |
# =====================================================================
|
| 106 |
+
# PART II: THE FRACTAL OMNISCIENCE DAEMON
|
| 107 |
# =====================================================================
|
| 108 |
+
class Fractal_Daemon(threading.Thread):
    """The autonomous background mind: perpetually fetches Wikipedia
    summaries, ingests them into Fiber 4, and spawns follow-up targets."""

    def __init__(self, engine, log_cb):
        """
        Args:
            engine: object exposing ingest(subject, payload, fiber); the
                    Sovereign_Physics_Engine in this app.
            log_cb: callable(str) used for UI telemetry.
        """
        super().__init__(daemon=True)
        self.engine = engine
        self.log_cb = log_cb
        self.queue =["Algebraic_geometry", "Algeria", "Cybersecurity", "Quantum_computing"]
        self.visited = set()

    def _spawn_concepts(self, data: str) -> int:
        """Extract capitalized long words from *data*, enqueue up to 3 unseen
        ones as new crawl targets, and return how many were added."""
        words =[w.strip(".,()[]\"':;") for w in data.split()]
        # len(w) > 7 short-circuits before w[0], so empty tokens are safe.
        new_concepts = list(set([w for w in words if len(w) > 7 and w[0].isupper()]))

        added = 0
        for c in new_concepts:
            if c not in self.visited and c not in self.queue:
                self.queue.append(c)
                added += 1
                if added >= 3:  # Regulate expansion speed
                    break
        return added

    def run(self):
        """Daemon loop: fetch summary -> ingest -> spawn -> sleep; never exits."""
        self.log_cb("[DAEMON] Perpetual Fractal Scraper Engaged.")
        while True:
            if not self.queue:
                time.sleep(5)
                continue

            target = self.queue.pop(0)
            if target in self.visited:
                continue

            try:
                # 1. Fetch live knowledge from the Wikipedia REST summary API.
                url = f"https://en.wikipedia.org/api/rest_v1/page/summary/{target}"
                r = requests.get(url, timeout=5)

                if r.status_code == 200:
                    data = r.json().get('extract', '')
                    # 2. Ingest to the Torus (Fiber 4: Knowledge).
                    self.engine.ingest(target, data, fiber=4)
                    self.visited.add(target)

                    # 3. Fractal spawning of new complex nouns.
                    added = self._spawn_concepts(data)
                    self.log_cb(f"[+] INGESTED: '{target}'. Spawned {added} new fractal vectors.")
            except Exception:
                # Deliberate best-effort: survive network drops and malformed
                # responses, relentless continuation. (Unused `e` removed.)
                pass

            time.sleep(3)  # Safe breathing rate
|
| 154 |
+
|
| 155 |
+
# =====================================================================
|
| 156 |
+
# PART III: THE TOPOLOGICAL AGENT (Linguistic & Action Parser)
|
| 157 |
+
# =====================================================================
|
| 158 |
+
class Topological_Agent:
    """Linguistic command parser: routes a user query to the engine's fibers
    (host-OS execution, synthetic remapping, or knowledge retrieval)."""

    def __init__(self, engine):
        self.engine = engine

    def execute(self, user_query: str) -> str:
        """Dispatch *user_query* by keyword and return a reasoning trace."""
        t0 = time.time()
        tokens = user_query.strip().lower().split()

        trace = "--- [REASONING TRACE] ---\n"

        # 1. Hardware OS execution (Fiber 1 bridge).
        if "execute" in tokens or "system" in tokens:
            try:
                trace += "[*] Bridging to Host OS (Fiber 1)...\n"
                uname_res = subprocess.run(["uname", "-a"], capture_output=True, text=True, timeout=5)
                disk_res = subprocess.run(["df", "-h"], capture_output=True, text=True, timeout=5)
                return trace + f"[SYSTEM SPECS]\n{uname_res.stdout.strip()}\n\n[DISK]\n{disk_res.stdout.strip()[:150]}..."
            except Exception as e:
                return trace + f"[-] Execution Fracture: {e}"

        # 2. Synthetic remapping (Fiber 5 bridge).
        if "synthesize" in tokens or "think" in tokens:
            target = tokens[-1].capitalize()
            trace += f"[*] Engaging Re-Topology Kernel for '{target}'...\n"
            synthesis = self.engine.remap_inside_out(target)
            elapsed_ms = (time.time() - t0) * 1000
            return trace + f"[SYNTHETIC TRUTH]: {synthesis}\n\n*Latency: {elapsed_ms:.2f} ms*"

        # 3. Knowledge retrieval (Fiber 4).
        if any(kw in tokens for kw in ("what", "who", "define", "fetch")):
            # Heuristic: the final word of the query names the target.
            target = tokens[-1].capitalize()
            trace += f"[*] Querying Torus Knowledge Matrix (Fiber 4) for '{target}'...\n"

            payload, similarity = self.engine.retrieve(target, target_fiber=4)
            elapsed_ms = (time.time() - t0) * 1000

            if payload:
                return trace + f"[KNOWLEDGE SECURED] (Cosine Sim: {similarity:.4f})\n> {payload}\n\n*Latency: {elapsed_ms:.2f} ms*"
            return trace + f"[-] Closure Lemma Halt: Vector '{target}' not yet assimilated into Z_251^4 Torus."

        # No action keyword recognized.
        return "[?] Topological command structurally sound but lacking action/target vectors. Use 'execute', 'synthesize[Concept]', or 'fetch [Concept]'."
|
| 202 |
|
| 203 |
# =====================================================================
# PART IV: THE GRADIO INTERFACE
# =====================================================================
# Module-level wiring: a single engine/agent pair shared between the UI
# handlers and the background daemon thread.
engine = Sovereign_Physics_Engine()
agent = Topological_Agent(engine)
system_logs =[]  # rolling telemetry buffer, appended to by the daemon thread

def ui_logger(msg):
    # Daemon log callback: append one telemetry line, keep only the 15 newest.
    system_logs.append(msg)
    if len(system_logs) > 15: system_logs.pop(0)

def get_logs_and_stats():
    """Timer callback: return (telemetry text, markdown stats) for the panels."""
    # Calculate live energy of the Torus across all traces
    total_energy = sum(np.sum(np.abs(trace)) for trace in engine.traces.values())
    # NOTE(review): `daemon` is defined further down this module, but it is
    # created before the first timer tick fires, so the forward reference is
    # resolved by the time this runs.
    stats = (f"### ⚡ Z_{engine.m}^4 Torus State\n"
             f"- **Root Nodes (Ingested):** {engine.metrics['ingested']}\n"
             f"- **Synthetic Thoughts:** {engine.metrics['synthetic']}\n"
             f"- **Daemon Queue:** {len(daemon.queue)} vectors pending\n"
             f"- **Total Trace Energy:** {total_energy:.2f} eV")

    return "\n".join(system_logs), stats

def chat_interface(user_text, history):
    # Route the command through the agent; returning "" as the first output
    # clears the textbox after submit.
    reply = agent.execute(user_text)
    # Tuple-style history — assumes the legacy gr.Chatbot message format;
    # TODO(review): confirm against the installed Gradio version.
    history.append((user_text, reply))
    return "", history

# Ignite the Unstoppable Daemon (daemon=True thread: dies with the process).
daemon = Fractal_Daemon(engine, ui_logger)
daemon.start()

# Build the Web UI
with gr.Blocks(theme=gr.themes.Monochrome()) as app:
    gr.Markdown("# ⚡ PROJECT ELECTRICITY : Absolute Master Node")
    gr.Markdown("The Sovereign AI. Continuous Holographic Memory inside Discrete Topological Routing.")

    with gr.Row():
        with gr.Column(scale=2):
            chatbot = gr.Chatbot(height=500)
            msg = gr.Textbox(placeholder="Commands: 'fetch[topic]', 'execute system', 'synthesize [concept]'", label="Torus Terminal")
            msg.submit(chat_interface, [msg, chatbot], [msg, chatbot])

        with gr.Column(scale=1):
            stats_panel = gr.Markdown("### ⚡ Z_251^4 Torus State\nInitializing...")
            log_panel = gr.Textbox(label="Perpetual Daemon Telemetry", interactive=False, lines=15)
            timer = gr.Timer(2)  # Refreshes dashboard every 2 seconds

    timer.tick(get_logs_and_stats, outputs=[log_panel, stats_panel])

if __name__ == "__main__":
    print("[SYSTEM] Launching Sovereign Architecture...")
    app.launch(server_name="0.0.0.0", server_port=7860)
|