# Tgi.node / app.py
# Author: LOOFYYLO — commit 428e2c1 (verified)
import numpy as np
import hashlib
import time
import threading
import requests
import subprocess
import gradio as gr
from bs4 import BeautifulSoup
# =====================================================================
# PART I: THE PHYSICS ENGINE (FSO MACRO + HRR MICRO)
# =====================================================================
class Sovereign_Physics_Engine:
    """
    Unifies the Discrete Z_251^4 topology (Routing) with
    Continuous Real-Valued HRR (Holographic Memory).

    Each "fiber" owns an isolated superposition trace plus a lexicon of
    exact payloads, so concepts stored in one fiber never interfere with
    retrieval in another.
    """
    def __init__(self, m=251, dim=1024, fibers=6):
        self.m = m            # Prime modulus for perfect hash distribution
        self.dim = dim        # High-dimensional hyper-space for memory
        self.fibers = fibers  # 0: OS, 1: Action, 2: Logic, 3: Aesthetics, 4: Knowledge, 5: Synthetic Thought
        # THE MACRO-ROUTING SOLUTION: Segregated traces guarantee sqrt(F) capacity gain
        self.traces = {f: np.zeros(self.dim, dtype=float) for f in range(self.fibers)}
        self.lexicon = {f: {} for f in range(self.fibers)}
        self.metrics = {"ingested": 0, "synthetic": 0}

    # --- 1. DISCRETE TOPOLOGY (The Closure Lemma) ---
    def hash_to_coord(self, concept: str, target_fiber: int) -> tuple:
        """Map a concept to (x, y, z, w) in Z_m^4 with (x+y+z+w) % m == target_fiber."""
        h = hashlib.sha256(str(concept).encode('utf-8')).digest()
        x, y, z = h[0] % self.m, h[1] % self.m, h[2] % self.m
        # w is chosen so the coordinate sum lands exactly on the target fiber
        w = (target_fiber - (x + y + z)) % self.m
        return (x, y, z, w)

    # --- 2. CONTINUOUS HOLOGRAPHIC MEMORY (Real-HRR) ---
    def _generate_vector(self, seed: str) -> np.ndarray:
        """Generates a stable, normalized Gaussian basis vector.

        BUGFIX: the original called np.random.seed(h), mutating the
        process-global RNG — unsafe with the daemon thread and UI requests
        generating vectors concurrently, and it clobbered any other code's
        randomness. A private RandomState(h) yields the exact same stream
        as seed(h)+randn, so generated vectors are unchanged.
        """
        h = int(hashlib.md5(seed.encode()).hexdigest()[:8], 16)
        rng = np.random.RandomState(h)  # local, deterministic, thread-safe
        v = rng.randn(self.dim)
        return v / np.linalg.norm(v)

    def _bind(self, v1: np.ndarray, v2: np.ndarray) -> np.ndarray:
        """Exact HRR Binding (circular convolution) using FFT over Reals."""
        return np.fft.ifft(np.fft.fft(v1) * np.fft.fft(v2)).real

    def _unbind(self, bound_v: np.ndarray, query_v: np.ndarray) -> np.ndarray:
        """Exact HRR Unbinding (circular correlation) via complex conjugate."""
        return np.fft.ifft(np.fft.fft(bound_v) * np.conj(np.fft.fft(query_v))).real

    # --- 3. MEMORY OPERATIONS ---
    def ingest(self, subject: str, payload: str, fiber: int, is_synthetic=False):
        """Hashes, Binds, and Superposes knowledge into the Torus."""
        coord = self.hash_to_coord(subject, fiber)
        v_subj = self._generate_vector(subject)
        v_data = self._generate_vector(payload[:200])  # Bind core conceptual shard
        # Save exact payload to dictionary for orthogonal resolution later
        self.lexicon[fiber][subject] = {"vector": v_subj, "payload": payload, "coord": coord}
        # Superpose into the isolated fiber trace
        bound_mem = self._bind(v_subj, v_data)
        self.traces[fiber] += bound_mem
        if is_synthetic:
            self.metrics["synthetic"] += 1
        else:
            self.metrics["ingested"] += 1

    def retrieve(self, query_subject: str, target_fiber: int) -> tuple:
        """Extracts noisy vector, resolves aliasing via Cosine Similarity.

        Returns (payload, similarity) for the best match in the fiber, or
        (None, 0.0) when the fiber's lexicon is empty.
        """
        if not self.lexicon[target_fiber]:
            return None, 0.0
        v_query = self._generate_vector(query_subject)
        noisy_v = self._unbind(self.traces[target_fiber], v_query)
        best_match = None
        max_sim = -1.0
        # Orthogonal checksum: compare the unbound vector against every clean
        # payload basis vector stored in this fiber; highest cosine wins.
        for concept, meta in self.lexicon[target_fiber].items():
            clean_v = self._generate_vector(meta["payload"][:200])
            sim = np.dot(noisy_v, clean_v) / (np.linalg.norm(noisy_v) * np.linalg.norm(clean_v) + 1e-9)
            if sim > max_sim:
                max_sim = sim
                best_match = meta["payload"]
        return best_match, max_sim

    def remap_inside_out(self, intent: str) -> str:
        """Synthetic Thought Generation (Fiber 5): project an intent against
        the sum of all traces and ingest the resulting statement."""
        v_intent = self._generate_vector(intent)
        # Approximate involution of the intent vector for correlation
        v_inv = np.roll(v_intent[::-1], 1)
        # Unbind from the collective sum of all traces (The Global Mind)
        global_sum = sum(self.traces.values())
        projection = np.round(np.real(np.fft.ifft(np.fft.fft(global_sum) * np.fft.fft(v_inv)))).astype(int)
        energy = int(np.sum(projection))
        if energy == 0:
            synth_truth = f"Concept '{intent}' is completely orthogonal to the Torus."
        else:
            synth_truth = f"Resonance detected at Geometric Weight: {energy}. Mathematical truth generated."
        self.ingest(intent, synth_truth, fiber=5, is_synthetic=True)
        return synth_truth
# =====================================================================
# PART II: THE FRACTAL OMNISCIENCE DAEMON
# =====================================================================
class Fractal_Daemon(threading.Thread):
    """The Autonomous Background Mind. Constantly crawls, extracts, and maps.

    Pops topics off `self.queue`, fetches each one's Wikipedia summary,
    ingests it into the engine (fiber 4), then seeds the queue with new
    long capitalized words found in the summary ("fractal spawning").
    Runs as a daemon thread, so it dies with the process.
    """
    def __init__(self, engine, log_cb):
        super().__init__(daemon=True)
        self.engine = engine   # holographic store to ingest summaries into
        self.log_cb = log_cb   # callable(str): telemetry sink for the UI panel
        self.queue = ["Algebraic_geometry", "Algeria", "Cybersecurity", "Quantum_computing"]
        self.visited = set()   # topics already ingested (dedup guard)

    def run(self):
        """Perpetual crawl loop; never raises, never stops."""
        self.log_cb("[DAEMON] Perpetual Fractal Scraper Engaged.")
        while True:
            if not self.queue:
                time.sleep(5)
                continue
            target = self.queue.pop(0)
            if target in self.visited:
                continue
            try:
                # 1. Fetch live knowledge
                url = f"https://en.wikipedia.org/api/rest_v1/page/summary/{target}"
                r = requests.get(url, timeout=5)
                if r.status_code == 200:
                    data = r.json().get('extract', '')
                    # 2. Ingest to Torus (Fiber 4)
                    self.engine.ingest(target, data, fiber=4)
                    self.visited.add(target)
                    # 3. Fractal Spawning (Extracting new complex nouns)
                    words = [w.strip(".,()[]\"':;") for w in data.split()]
                    new_concepts = list(set([w for w in words if len(w) > 7 and w[0].isupper()]))
                    added = 0
                    for c in new_concepts:
                        if c not in self.visited and c not in self.queue:
                            self.queue.append(c)
                            added += 1
                        if added >= 3:
                            break  # Regulate expansion speed
                    self.log_cb(f"[+] INGESTED: '{target}'. Spawned {added} new fractal vectors.")
            except Exception as e:
                # BUGFIX: the original swallowed every failure silently
                # (`except ... as e: pass`), so network drops and API errors
                # were invisible. Stay best-effort — never crash the thread —
                # but surface the dropped target in the telemetry panel.
                self.log_cb(f"[!] DROPPED '{target}': {e}")
            time.sleep(3)  # Safe breathing rate
# =====================================================================
# PART III: THE TOPOLOGICAL AGENT (Linguistic & Action Parser)
# =====================================================================
class Topological_Agent:
    """Parses terminal language and bridges commands onto engine fibers."""

    def __init__(self, engine):
        self.engine = engine  # Sovereign_Physics_Engine backing this agent

    def execute(self, user_query: str) -> str:
        """Route one raw command line to an action path.

        Keyword triggers, checked in priority order:
        'execute'/'system'  -> host OS probe (Fiber 1),
        'synthesize'/'think' -> synthetic remapping (Fiber 5),
        'what'/'who'/'define'/'fetch' -> knowledge retrieval (Fiber 4).
        Returns a reasoning-trace string for the chat panel.
        """
        t0 = time.time()
        tokens = user_query.strip().lower().split()
        trace = "--- [REASONING TRACE] ---\n"

        # 1. HARDWARE OS EXECUTION (Fiber 1 Bridge)
        if {"execute", "system"} & set(tokens):
            try:
                trace += "[*] Bridging to Host OS (Fiber 1)...\n"
                uname = subprocess.run(["uname", "-a"], capture_output=True, text=True, timeout=5)
                disk = subprocess.run(["df", "-h"], capture_output=True, text=True, timeout=5)
                return trace + f"[SYSTEM SPECS]\n{uname.stdout.strip()}\n\n[DISK]\n{disk.stdout.strip()[:150]}..."
            except Exception as err:
                return trace + f"[-] Execution Fracture: {err}"

        # 2. SYNTHETIC REMAPPING (Fiber 5 Bridge)
        if {"synthesize", "think"} & set(tokens):
            concept = tokens[-1].capitalize()
            trace += f"[*] Engaging Re-Topology Kernel for '{concept}'...\n"
            truth = self.engine.remap_inside_out(concept)
            elapsed_ms = (time.time() - t0) * 1000
            return trace + f"[SYNTHETIC TRUTH]: {truth}\n\n*Latency: {elapsed_ms:.2f} ms*"

        # 3. KNOWLEDGE RETRIEVAL (Fiber 4)
        if {"what", "who", "define", "fetch"} & set(tokens):
            # Heuristic: the final word of the query names the target concept
            concept = tokens[-1].capitalize()
            trace += f"[*] Querying Torus Knowledge Matrix (Fiber 4) for '{concept}'...\n"
            payload, conf = self.engine.retrieve(concept, target_fiber=4)
            elapsed_ms = (time.time() - t0) * 1000
            if payload:
                return trace + f"[KNOWLEDGE SECURED] (Cosine Sim: {conf:.4f})\n> {payload}\n\n*Latency: {elapsed_ms:.2f} ms*"
            return trace + f"[-] Closure Lemma Halt: Vector '{concept}' not yet assimilated into Z_251^4 Torus."

        return "[?] Topological command structurally sound but lacking action/target vectors. Use 'execute', 'synthesize[Concept]', or 'fetch [Concept]'."
# =====================================================================
# PART IV: THE GRADIO INTERFACE
# =====================================================================
# Singletons shared by the daemon thread and every Gradio request handler.
engine = Sovereign_Physics_Engine()
agent = Topological_Agent(engine)
# Rolling telemetry buffer rendered in the UI log panel (newest last,
# trimmed to 15 entries by ui_logger).
system_logs = []
def ui_logger(msg):
    """Append one telemetry line, keeping only the 15 most recent entries."""
    system_logs.append(msg)
    # Trim from the front so the panel always shows the newest lines.
    while len(system_logs) > 15:
        system_logs.pop(0)
def get_logs_and_stats():
    """Refresh the dashboard: returns (daemon telemetry text, stats markdown)."""
    # Live "energy" of the Torus = summed absolute magnitude of every trace.
    total_energy = 0.0
    for fiber_trace in engine.traces.values():
        total_energy += np.sum(np.abs(fiber_trace))
    stats_lines = [
        f"### ⚡ Z_{engine.m}^4 Torus State",
        f"- **Root Nodes (Ingested):** {engine.metrics['ingested']}",
        f"- **Synthetic Thoughts:** {engine.metrics['synthetic']}",
        f"- **Daemon Queue:** {len(daemon.queue)} vectors pending",
        f"- **Total Trace Energy:** {total_energy:.2f} eV",
    ]
    return "\n".join(system_logs), "\n".join(stats_lines)
def chat_interface(user_text, history):
    """Handle one chat turn: run the agent, record the exchange in history,
    and return an empty string to clear the input textbox."""
    turn = (user_text, agent.execute(user_text))
    history.append(turn)
    return "", history
# Ignite the Unstoppable Daemon.
# Runs as a daemon thread (set in Fractal_Daemon.__init__), so it is killed
# automatically when the main process exits.
daemon = Fractal_Daemon(engine, ui_logger)
daemon.start()
# Build the Web UI: chat terminal on the left, live telemetry on the right.
with gr.Blocks(theme=gr.themes.Monochrome()) as app:
    gr.Markdown("# ⚡ PROJECT ELECTRICITY : Absolute Master Node")
    gr.Markdown("The Sovereign AI. Continuous Holographic Memory inside Discrete Topological Routing.")
    with gr.Row():
        with gr.Column(scale=2):
            # Chat panel: submitting the textbox runs the agent and clears it.
            chatbot = gr.Chatbot(height=500)
            msg = gr.Textbox(placeholder="Commands: 'fetch[topic]', 'execute system', 'synthesize [concept]'", label="Torus Terminal")
            msg.submit(chat_interface, [msg, chatbot], [msg, chatbot])
        with gr.Column(scale=1):
            # Dashboard panels refreshed by the timer below.
            stats_panel = gr.Markdown("### ⚡ Z_251^4 Torus State\nInitializing...")
            log_panel = gr.Textbox(label="Perpetual Daemon Telemetry", interactive=False, lines=15)
    timer = gr.Timer(2)  # Refreshes dashboard every 2 seconds
    timer.tick(get_logs_and_stats, outputs=[log_panel, stats_panel])
if __name__ == "__main__":
    print("[SYSTEM] Launching Sovereign Architecture...")
    # 0.0.0.0 binds all interfaces; 7860 is the conventional HF Spaces port.
    app.launch(server_name="0.0.0.0", server_port=7860)