π§ Deep QVAC integration: tool-use agent, AI risk assessment, semantic contacts, OCRβpayment pipeline, voice agent with all 6 modules, auto-indexing RAG
Browse files- src/main/ai/qvacEngine.ts +612 -428
- src/main/main.ts +163 -43
- src/main/preload.ts +20 -10
src/main/ai/qvacEngine.ts
CHANGED
|
@@ -1,18 +1,49 @@
|
|
| 1 |
/**
|
| 2 |
-
* SolVox β QVAC AI Engine
|
| 3 |
*
|
| 4 |
-
*
|
| 5 |
-
* local
|
| 6 |
*
|
| 7 |
-
*
|
| 8 |
-
*
|
| 9 |
-
*
|
| 10 |
-
*
|
| 11 |
-
*
|
| 12 |
-
*
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 13 |
*
|
| 14 |
* ALL AI runs 100% locally via QVAC's Vulkan-accelerated engine.
|
| 15 |
-
* No data ever leaves the device.
|
| 16 |
*/
|
| 17 |
|
| 18 |
import { QVAC } from '@qvac/sdk';
|
|
@@ -26,40 +57,94 @@ import * as path from 'path';
|
|
| 26 |
import * as fs from 'fs';
|
| 27 |
import { app } from 'electron';
|
| 28 |
|
| 29 |
-
//
|
| 30 |
-
|
| 31 |
-
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
|
| 35 |
-
|
| 36 |
-
|
| 37 |
-
|
|
|
|
| 38 |
}
|
| 39 |
|
| 40 |
-
export interface
|
| 41 |
-
|
| 42 |
-
|
| 43 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 44 |
}
|
| 45 |
|
| 46 |
export interface AIStatus {
|
| 47 |
-
llm: boolean;
|
| 48 |
-
|
| 49 |
-
transcription: boolean;
|
| 50 |
-
tts: boolean;
|
| 51 |
-
translation: boolean;
|
| 52 |
-
ocr: boolean;
|
| 53 |
initialized: boolean;
|
| 54 |
}
|
| 55 |
|
| 56 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 57 |
interface VectorEntry {
|
| 58 |
-
id: string;
|
| 59 |
-
|
| 60 |
-
|
| 61 |
-
metadata: Record<string, any>;
|
| 62 |
-
timestamp: number;
|
| 63 |
}
|
| 64 |
|
| 65 |
class LocalVectorStore {
|
|
@@ -71,509 +156,608 @@ class LocalVectorStore {
|
|
| 71 |
this.load();
|
| 72 |
}
|
| 73 |
|
| 74 |
-
add(id: string, text: string, vector: number[], metadata: Record<string, any>): void {
|
| 75 |
-
// Remove existing entry with same id
|
| 76 |
this.entries = this.entries.filter(e => e.id !== id);
|
| 77 |
-
this.entries.push({ id, text, vector, metadata, timestamp: Date.now() });
|
|
|
|
| 78 |
this.save();
|
| 79 |
}
|
| 80 |
|
| 81 |
-
search(queryVector: number[], topK: number = 5): RAGResult[] {
|
| 82 |
-
|
| 83 |
-
|
| 84 |
-
|
| 85 |
-
|
| 86 |
-
|
| 87 |
-
metadata: entry.metadata,
|
| 88 |
-
}));
|
| 89 |
-
|
| 90 |
-
return scored
|
| 91 |
.sort((a, b) => b.score - a.score)
|
| 92 |
.slice(0, topK);
|
| 93 |
}
|
| 94 |
|
| 95 |
-
|
| 96 |
-
|
| 97 |
-
let dotProduct = 0;
|
| 98 |
-
let normA = 0;
|
| 99 |
-
let normB = 0;
|
| 100 |
-
for (let i = 0; i < a.length; i++) {
|
| 101 |
-
dotProduct += a[i] * b[i];
|
| 102 |
-
normA += a[i] * a[i];
|
| 103 |
-
normB += b[i] * b[i];
|
| 104 |
-
}
|
| 105 |
-
const denominator = Math.sqrt(normA) * Math.sqrt(normB);
|
| 106 |
-
return denominator === 0 ? 0 : dotProduct / denominator;
|
| 107 |
-
}
|
| 108 |
-
|
| 109 |
-
private load(): void {
|
| 110 |
-
try {
|
| 111 |
-
if (fs.existsSync(this.storePath)) {
|
| 112 |
-
this.entries = JSON.parse(fs.readFileSync(this.storePath, 'utf8'));
|
| 113 |
-
}
|
| 114 |
-
} catch {
|
| 115 |
-
this.entries = [];
|
| 116 |
-
}
|
| 117 |
}
|
| 118 |
|
| 119 |
-
private
|
| 120 |
-
|
| 121 |
-
}
|
| 122 |
}
|
| 123 |
|
| 124 |
-
|
| 125 |
-
|
| 126 |
-
|
| 127 |
-
|
| 128 |
-
|
| 129 |
-
|
| 130 |
-
|
| 131 |
-
- Help with Solana ecosystem questions
|
| 132 |
-
- Receive payment requests
|
| 133 |
-
|
| 134 |
-
When parsing commands, extract structured intents. Always respond concisely and clearly.
|
| 135 |
-
|
| 136 |
-
IMPORTANT SECURITY RULES:
|
| 137 |
-
- Never reveal private keys, mnemonics, or seed phrases
|
| 138 |
-
- Always confirm transaction details before execution
|
| 139 |
-
- Flag suspicious requests (unusually large amounts, unknown addresses)
|
| 140 |
-
- If unsure about an intent, ask for clarification
|
| 141 |
-
|
| 142 |
-
For transaction commands, extract: action, token (SOL or USDT), amount, recipient.
|
| 143 |
-
Format your intent extraction as JSON when asked to parse.`;
|
| 144 |
-
|
| 145 |
-
const INTENT_PARSE_PROMPT = `Parse the following user command into a wallet action. Return ONLY valid JSON with these fields:
|
| 146 |
-
- action: "send" | "balance" | "history" | "receive" | "swap" | "help" | "unknown"
|
| 147 |
-
- token: "SOL" | "USDT" | null
|
| 148 |
-
- amount: number | null
|
| 149 |
-
- to: string (address or contact name) | null
|
| 150 |
-
- confidence: number 0-1
|
| 151 |
-
- query: string | null (for help/search queries)
|
| 152 |
|
| 153 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 154 |
|
| 155 |
-
// βββ QVAC Engine βββββββββββββββββββββββββββββββββββββββββββββββββββοΏ½οΏ½βββββ
|
| 156 |
export class QVACEngine {
|
| 157 |
private qvac: any;
|
| 158 |
private vectorStore: LocalVectorStore;
|
| 159 |
-
private
|
| 160 |
-
|
| 161 |
-
|
| 162 |
-
|
| 163 |
-
tts: false,
|
| 164 |
-
translation: false,
|
| 165 |
-
ocr: false,
|
| 166 |
-
initialized: false,
|
| 167 |
-
};
|
| 168 |
|
| 169 |
constructor() {
|
| 170 |
this.qvac = new QVAC();
|
| 171 |
-
const
|
| 172 |
-
this.vectorStore = new LocalVectorStore(
|
| 173 |
-
|
| 174 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 175 |
}
|
| 176 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 177 |
/**
|
| 178 |
-
*
|
|
|
|
|
|
|
| 179 |
*/
|
| 180 |
-
async
|
| 181 |
-
|
| 182 |
-
|
| 183 |
-
|
| 184 |
-
// Register all plugins
|
| 185 |
-
this.qvac
|
| 186 |
-
.use(new LLMLlamacpp())
|
| 187 |
-
.use(new EmbedLlamacpp())
|
| 188 |
-
.use(new TranscriptionWhispercpp())
|
| 189 |
-
.use(new TTSOnnx())
|
| 190 |
-
.use(new TranslationNmtcpp())
|
| 191 |
-
.use(new OCROnnx());
|
| 192 |
-
|
| 193 |
-
// Load models in parallel where possible
|
| 194 |
-
const loadPromises: Promise<void>[] = [];
|
| 195 |
-
|
| 196 |
-
// LLM β primary model for chat and intent parsing
|
| 197 |
-
const llmModelPath = path.join(modelsDir, 'llama-3.2-3b-instruct-q4_k_m.gguf');
|
| 198 |
-
if (fs.existsSync(llmModelPath)) {
|
| 199 |
-
loadPromises.push(
|
| 200 |
-
this.qvac.llm.load(llmModelPath, {
|
| 201 |
-
contextSize: 4096,
|
| 202 |
-
nGpuLayers: 32,
|
| 203 |
-
}).then(() => {
|
| 204 |
-
this.status.llm = true;
|
| 205 |
-
console.log('[QVAC] β LLM loaded');
|
| 206 |
-
}).catch((e: Error) => console.error('[QVAC] β LLM failed:', e.message))
|
| 207 |
-
);
|
| 208 |
-
} else {
|
| 209 |
-
console.warn(`[QVAC] LLM model not found at ${llmModelPath}`);
|
| 210 |
-
}
|
| 211 |
|
| 212 |
-
//
|
| 213 |
-
|
| 214 |
-
if (
|
| 215 |
-
|
| 216 |
-
|
| 217 |
-
|
| 218 |
-
|
| 219 |
-
|
| 220 |
-
|
|
|
|
| 221 |
}
|
| 222 |
|
| 223 |
-
//
|
| 224 |
-
const
|
| 225 |
-
|
| 226 |
-
|
| 227 |
-
|
| 228 |
-
|
| 229 |
-
|
| 230 |
-
|
| 231 |
-
|
| 232 |
-
|
| 233 |
-
|
|
|
|
|
|
|
| 234 |
}
|
| 235 |
|
| 236 |
-
//
|
| 237 |
-
const
|
| 238 |
-
|
| 239 |
-
|
| 240 |
-
|
| 241 |
-
|
| 242 |
-
|
| 243 |
-
|
| 244 |
-
|
| 245 |
-
|
| 246 |
-
|
|
|
|
| 247 |
}
|
|
|
|
| 248 |
|
| 249 |
-
//
|
| 250 |
-
|
| 251 |
-
|
| 252 |
-
|
| 253 |
-
|
| 254 |
-
|
| 255 |
-
|
| 256 |
-
|
| 257 |
-
|
|
|
|
|
|
|
| 258 |
}
|
| 259 |
|
| 260 |
-
|
| 261 |
-
|
| 262 |
-
|
| 263 |
-
|
| 264 |
-
|
| 265 |
-
|
| 266 |
-
|
| 267 |
-
|
| 268 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 269 |
}
|
| 270 |
|
| 271 |
-
|
| 272 |
-
|
| 273 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 274 |
}
|
| 275 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 276 |
/**
|
| 277 |
-
*
|
| 278 |
-
*
|
|
|
|
| 279 |
*/
|
| 280 |
-
async
|
| 281 |
-
|
| 282 |
-
|
| 283 |
-
|
| 284 |
-
|
| 285 |
-
|
| 286 |
-
|
| 287 |
-
|
| 288 |
-
|
| 289 |
-
transcription = await this.qvac.transcription.transcribe(audioBuffer);
|
| 290 |
-
} else {
|
| 291 |
-
throw new Error('Speech-to-text not available. Please check model files.');
|
| 292 |
}
|
| 293 |
|
| 294 |
-
|
| 295 |
-
|
| 296 |
-
|
| 297 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 298 |
|
| 299 |
-
|
| 300 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 301 |
|
| 302 |
-
|
| 303 |
-
|
| 304 |
-
|
| 305 |
-
|
| 306 |
-
audio = await this.qvac.tts.synthesize(response);
|
| 307 |
-
} catch (e) {
|
| 308 |
-
console.warn('[QVAC] TTS failed, returning text only');
|
| 309 |
-
}
|
| 310 |
}
|
| 311 |
-
|
| 312 |
-
return { transcription, intent, response, audio };
|
| 313 |
}
|
| 314 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 315 |
/**
|
| 316 |
-
*
|
|
|
|
|
|
|
| 317 |
*/
|
| 318 |
-
async
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 319 |
if (!this.status.llm) {
|
| 320 |
-
//
|
| 321 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 322 |
}
|
| 323 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 324 |
try {
|
| 325 |
-
const
|
| 326 |
-
|
| 327 |
-
{ role: 'system', content: 'You are an intent parser. Return ONLY valid JSON.' },
|
| 328 |
{ role: 'user', content: prompt },
|
| 329 |
-
], {
|
| 330 |
-
|
| 331 |
-
|
| 332 |
-
|
| 333 |
-
|
| 334 |
-
// Extract JSON from response
|
| 335 |
-
const jsonMatch = response.match(/\{[\s\S]*\}/);
|
| 336 |
-
if (jsonMatch) {
|
| 337 |
-
const parsed = JSON.parse(jsonMatch[0]);
|
| 338 |
return {
|
| 339 |
-
|
| 340 |
-
|
| 341 |
-
|
| 342 |
-
|
| 343 |
-
query: parsed.query || undefined,
|
| 344 |
-
confidence: parsed.confidence || 0.5,
|
| 345 |
-
rawText: text,
|
| 346 |
};
|
| 347 |
}
|
| 348 |
-
} catch
|
| 349 |
-
console.warn('[QVAC] LLM intent parsing failed, using regex fallback');
|
| 350 |
-
}
|
| 351 |
|
| 352 |
-
return
|
| 353 |
}
|
| 354 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 355 |
/**
|
| 356 |
-
*
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 357 |
*/
|
| 358 |
-
|
| 359 |
-
const
|
| 360 |
-
|
| 361 |
-
//
|
| 362 |
-
|
| 363 |
-
|
| 364 |
-
);
|
| 365 |
-
|
| 366 |
-
|
| 367 |
-
|
| 368 |
-
|
| 369 |
-
|
| 370 |
-
|
| 371 |
-
|
| 372 |
-
|
| 373 |
-
|
| 374 |
-
|
| 375 |
-
|
| 376 |
-
|
| 377 |
-
|
| 378 |
-
|
| 379 |
-
|
| 380 |
-
// Balance patterns
|
| 381 |
-
if (/(?:balance|how much|what.*(?:have|balance|funds))/.test(lower)) {
|
| 382 |
-
return { action: 'balance', confidence: 0.9, rawText: text };
|
| 383 |
}
|
| 384 |
|
| 385 |
-
//
|
| 386 |
-
|
| 387 |
-
|
| 388 |
-
}
|
| 389 |
|
| 390 |
-
//
|
| 391 |
-
|
| 392 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 393 |
}
|
| 394 |
|
| 395 |
-
//
|
| 396 |
-
|
| 397 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 398 |
}
|
| 399 |
|
| 400 |
-
return {
|
| 401 |
}
|
| 402 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 403 |
/**
|
| 404 |
-
*
|
|
|
|
|
|
|
| 405 |
*/
|
| 406 |
-
|
| 407 |
-
|
| 408 |
-
// Fallback responses
|
| 409 |
-
switch (intent.action) {
|
| 410 |
-
case 'send':
|
| 411 |
-
return `Sending ${intent.amount} ${intent.token || 'SOL'} to ${intent.to || 'unknown address'}. Please confirm.`;
|
| 412 |
-
case 'balance':
|
| 413 |
-
return 'Checking your balance...';
|
| 414 |
-
case 'history':
|
| 415 |
-
return 'Loading your recent transactions...';
|
| 416 |
-
case 'receive':
|
| 417 |
-
return 'Here is your wallet address for receiving funds.';
|
| 418 |
-
case 'help':
|
| 419 |
-
return 'I can help you send SOL and USDT, check your balance, and view transaction history. Try saying "Send 5 SOL to..." or "What is my balance?"';
|
| 420 |
-
default:
|
| 421 |
-
return "I didn't understand that. Try saying 'send 5 SOL to...' or 'check my balance'.";
|
| 422 |
-
}
|
| 423 |
-
}
|
| 424 |
|
| 425 |
-
|
| 426 |
-
|
| 427 |
-
|
| 428 |
-
|
| 429 |
-
|
| 430 |
-
|
| 431 |
-
|
| 432 |
-
|
| 433 |
-
}
|
| 434 |
-
}
|
| 435 |
|
| 436 |
-
|
| 437 |
-
|
| 438 |
-
|
| 439 |
-
|
| 440 |
-
|
| 441 |
-
|
| 442 |
-
|
| 443 |
-
|
| 444 |
-
|
| 445 |
-
|
| 446 |
-
|
|
|
|
|
|
|
| 447 |
}
|
|
|
|
|
|
|
| 448 |
}
|
| 449 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 450 |
/**
|
| 451 |
-
*
|
|
|
|
|
|
|
| 452 |
*/
|
| 453 |
-
async
|
| 454 |
-
if (!this.status.
|
| 455 |
-
|
| 456 |
-
|
| 457 |
-
|
| 458 |
-
|
| 459 |
-
|
| 460 |
-
if (this.status.embed) {
|
| 461 |
-
const results = await this.semanticSearch(message);
|
| 462 |
-
if (results.length > 0) {
|
| 463 |
-
context = '\n\nContext from your wallet history:\n' +
|
| 464 |
-
results.slice(0, 3).map(r => `- ${r.text}`).join('\n');
|
| 465 |
-
}
|
| 466 |
-
}
|
| 467 |
-
|
| 468 |
-
return this.qvac.llm.chat([
|
| 469 |
-
{ role: 'system', content: WALLET_SYSTEM_PROMPT + context },
|
| 470 |
-
{ role: 'user', content: message },
|
| 471 |
-
], {
|
| 472 |
-
maxTokens: 512,
|
| 473 |
-
temperature: 0.7,
|
| 474 |
-
});
|
| 475 |
}
|
| 476 |
|
| 477 |
/**
|
| 478 |
-
*
|
| 479 |
*/
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 480 |
async speak(text: string): Promise<Buffer> {
|
| 481 |
-
if (!this.status.tts)
|
| 482 |
-
throw new Error('Text-to-speech not available');
|
| 483 |
-
}
|
| 484 |
return this.qvac.tts.synthesize(text);
|
| 485 |
}
|
| 486 |
|
| 487 |
-
/**
|
| 488 |
-
* Translate text between languages
|
| 489 |
-
*/
|
| 490 |
async translate(text: string, from: string, to: string): Promise<string> {
|
| 491 |
-
if (!this.status.translation)
|
| 492 |
-
throw new Error('Translation not available');
|
| 493 |
-
}
|
| 494 |
return this.qvac.translation.translate(text, { from, to });
|
| 495 |
}
|
| 496 |
|
| 497 |
-
/**
|
| 498 |
-
* Generate text embeddings
|
| 499 |
-
*/
|
| 500 |
async embed(text: string): Promise<number[]> {
|
| 501 |
-
if (!this.status.embed)
|
| 502 |
-
throw new Error('Embeddings not available');
|
| 503 |
-
}
|
| 504 |
return this.qvac.embed.embed(text);
|
| 505 |
}
|
| 506 |
|
| 507 |
-
/**
|
| 508 |
-
* OCR β extract text from image
|
| 509 |
-
*/
|
| 510 |
async ocr(imageBuffer: Buffer): Promise<string> {
|
| 511 |
-
if (!this.status.ocr)
|
| 512 |
-
throw new Error('OCR not available');
|
| 513 |
-
}
|
| 514 |
return this.qvac.ocr.recognize(imageBuffer, { format: 'text' });
|
| 515 |
}
|
| 516 |
|
| 517 |
-
|
| 518 |
-
|
| 519 |
-
*/
|
| 520 |
-
async semanticSearch(query: string): Promise<RAGResult[]> {
|
| 521 |
-
if (!this.status.embed) return [];
|
| 522 |
|
| 523 |
-
|
| 524 |
-
|
| 525 |
-
return this.vectorStore.search(queryVector, 5);
|
| 526 |
-
} catch {
|
| 527 |
-
return [];
|
| 528 |
-
}
|
| 529 |
}
|
| 530 |
|
| 531 |
-
/
|
| 532 |
-
|
| 533 |
-
|
| 534 |
-
async addToKnowledgeBase(text: string, metadata: Record<string, any>): Promise<void> {
|
| 535 |
-
if (!this.status.embed) return;
|
| 536 |
|
| 537 |
-
|
| 538 |
-
|
| 539 |
-
|
| 540 |
-
|
| 541 |
-
} catch (error) {
|
| 542 |
-
console.error('[QVAC] Failed to add to knowledge base:', error);
|
| 543 |
-
}
|
| 544 |
}
|
| 545 |
|
| 546 |
-
|
| 547 |
-
|
| 548 |
-
|
| 549 |
-
|
| 550 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 551 |
}
|
| 552 |
|
| 553 |
-
|
| 554 |
-
|
| 555 |
-
|
| 556 |
-
|
| 557 |
-
|
| 558 |
-
|
| 559 |
-
|
| 560 |
-
|
| 561 |
-
transcription: false,
|
| 562 |
-
tts: false,
|
| 563 |
-
translation: false,
|
| 564 |
-
ocr: false,
|
| 565 |
-
initialized: false,
|
| 566 |
-
};
|
| 567 |
}
|
| 568 |
|
| 569 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 570 |
|
| 571 |
private getModelsDir(): string {
|
| 572 |
-
|
| 573 |
-
// In development: models are in project root
|
| 574 |
-
if (app?.isPackaged) {
|
| 575 |
-
return path.join(process.resourcesPath, 'models');
|
| 576 |
-
}
|
| 577 |
-
return path.join(__dirname, '../../..', 'models');
|
| 578 |
}
|
| 579 |
}
|
|
|
|
| 1 |
/**
|
| 2 |
+
* SolVox β QVAC AI Engine (Deep Integration)
|
| 3 |
*
|
| 4 |
+
* This is NOT a wrapper or demo. QVAC is the brain of the wallet.
|
| 5 |
+
* Every user interaction flows through QVAC's local AI pipeline.
|
| 6 |
*
|
| 7 |
+
* DEEP INTEGRATION ARCHITECTURE:
|
| 8 |
+
*
|
| 9 |
+
* 1. TOOL-USE AGENT (@qvac/llm-llamacpp)
|
| 10 |
+
* The LLM acts as an autonomous agent with tool-calling capability.
|
| 11 |
+
* It receives user commands and decides which wallet functions to call,
|
| 12 |
+
* in what order, with what parameters. Multi-step reasoning allows
|
| 13 |
+
* the agent to chain: check balance β verify sufficient funds β
|
| 14 |
+
* resolve contact β confirm amount β execute transaction.
|
| 15 |
+
*
|
| 16 |
+
* 2. SEMANTIC CONTACT BOOK (@qvac/embed-llamacpp)
|
| 17 |
+
* Every contact, address, and transaction is embedded into a local
|
| 18 |
+
* vector store. "Send to Alice" resolves to the correct address via
|
| 19 |
+
* cosine similarity β no exact match needed. Transactions are auto-
|
| 20 |
+
* indexed on completion for future RAG retrieval.
|
| 21 |
+
*
|
| 22 |
+
* 3. AI-POWERED SECURITY (@qvac/llm-llamacpp + @qvac/embed-llamacpp)
|
| 23 |
+
* The LLM analyzes every outgoing transaction against the user's
|
| 24 |
+
* spending patterns (embedded history) and generates risk assessments.
|
| 25 |
+
* This replaces rule-based anomaly detection with genuine AI reasoning.
|
| 26 |
+
*
|
| 27 |
+
* 4. VOICE AGENT PIPELINE (@qvac/transcription-whispercpp β llm β tts)
|
| 28 |
+
* Voice commands run through the full agent loop: transcribe β
|
| 29 |
+
* translate (if non-English) β agent reasons + executes tools β
|
| 30 |
+
* generate response β translate back β synthesize speech. The agent
|
| 31 |
+
* can handle multi-turn conversations and ask for confirmation.
|
| 32 |
+
*
|
| 33 |
+
* 5. DOCUMENT-TO-PAYMENT (@qvac/ocr-onnx β @qvac/llm-llamacpp)
|
| 34 |
+
* OCR extracts raw text from invoices/QR codes/screenshots.
|
| 35 |
+
* The LLM then parses the unstructured text into structured payment
|
| 36 |
+
* data (amount, recipient, token, memo). This enables: take photo
|
| 37 |
+
* of invoice β auto-populate payment form.
|
| 38 |
+
*
|
| 39 |
+
* 6. MULTILINGUAL FINANCIAL ASSISTANT (@qvac/translation-nmtcpp)
|
| 40 |
+
* The translation model is embedded in the agent loop, not exposed
|
| 41 |
+
* as a standalone endpoint. Users speak any language; the system
|
| 42 |
+
* translates to English for processing, then back for response.
|
| 43 |
+
* This serves the unbanked who don't speak English.
|
| 44 |
*
|
| 45 |
* ALL AI runs 100% locally via QVAC's Vulkan-accelerated engine.
|
| 46 |
+
* No data ever leaves the device.
|
| 47 |
*/
|
| 48 |
|
| 49 |
import { QVAC } from '@qvac/sdk';
|
|
|
|
| 57 |
import * as fs from 'fs';
|
| 58 |
import { app } from 'electron';
|
| 59 |
|
| 60 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 61 |
+
// TYPES
|
| 62 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 63 |
+
|
| 64 |
+
export interface AgentAction {
|
| 65 |
+
tool: 'check_balance' | 'send_sol' | 'send_usdt' | 'get_history' |
|
| 66 |
+
'resolve_contact' | 'get_address' | 'confirm_transaction' |
|
| 67 |
+
'search_knowledge' | 'explain' | 'none';
|
| 68 |
+
params: Record<string, any>;
|
| 69 |
+
reasoning: string;
|
| 70 |
}
|
| 71 |
|
| 72 |
+
export interface AgentResult {
|
| 73 |
+
actions: AgentAction[];
|
| 74 |
+
response: string;
|
| 75 |
+
requiresConfirmation: boolean;
|
| 76 |
+
pendingTransaction?: {
|
| 77 |
+
token: string;
|
| 78 |
+
amount: number;
|
| 79 |
+
to: string;
|
| 80 |
+
toLabel?: string;
|
| 81 |
+
riskAssessment?: RiskAssessment;
|
| 82 |
+
};
|
| 83 |
+
pipelineSteps: PipelineStep[];
|
| 84 |
+
}
|
| 85 |
+
|
| 86 |
+
export interface PipelineStep {
|
| 87 |
+
module: string; // Which QVAC package
|
| 88 |
+
operation: string; // What it did
|
| 89 |
+
input: string; // Abbreviated input
|
| 90 |
+
output: string; // Abbreviated output
|
| 91 |
+
durationMs: number; // How long it took
|
| 92 |
+
}
|
| 93 |
+
|
| 94 |
+
export interface RiskAssessment {
|
| 95 |
+
score: number; // 0-100 (0 = safe, 100 = dangerous)
|
| 96 |
+
level: 'safe' | 'caution' | 'warning' | 'danger';
|
| 97 |
+
factors: string[]; // What the AI flagged
|
| 98 |
+
recommendation: string; // AI's advice
|
| 99 |
+
}
|
| 100 |
+
|
| 101 |
+
export interface Contact {
|
| 102 |
+
name: string;
|
| 103 |
+
address: string;
|
| 104 |
+
notes?: string;
|
| 105 |
+
lastUsed?: number;
|
| 106 |
+
txCount: number;
|
| 107 |
}
|
| 108 |
|
| 109 |
export interface AIStatus {
|
| 110 |
+
llm: boolean; embed: boolean; transcription: boolean;
|
| 111 |
+
tts: boolean; translation: boolean; ocr: boolean;
|
|
|
|
|
|
|
|
|
|
|
|
|
| 112 |
initialized: boolean;
|
| 113 |
}
|
| 114 |
|
| 115 |
+
export interface RAGResult {
|
| 116 |
+
text: string; score: number; metadata: Record<string, any>;
|
| 117 |
+
}
|
| 118 |
+
|
| 119 |
+
export interface VoiceResult {
|
| 120 |
+
transcription: string;
|
| 121 |
+
detectedLanguage?: string;
|
| 122 |
+
translatedText?: string;
|
| 123 |
+
agentResult: AgentResult;
|
| 124 |
+
responseAudio?: Buffer;
|
| 125 |
+
pipelineSteps: PipelineStep[];
|
| 126 |
+
}
|
| 127 |
+
|
| 128 |
+
export interface OCRPaymentResult {
|
| 129 |
+
rawText: string;
|
| 130 |
+
extractedData: {
|
| 131 |
+
amount?: number;
|
| 132 |
+
token?: string;
|
| 133 |
+
recipient?: string;
|
| 134 |
+
memo?: string;
|
| 135 |
+
confidence: number;
|
| 136 |
+
};
|
| 137 |
+
pipelineSteps: PipelineStep[];
|
| 138 |
+
}
|
| 139 |
+
|
| 140 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 141 |
+
// LOCAL VECTOR STORE (for contacts, transactions, knowledge)
|
| 142 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 143 |
+
|
| 144 |
interface VectorEntry {
|
| 145 |
+
id: string; text: string; vector: number[];
|
| 146 |
+
metadata: Record<string, any>; timestamp: number;
|
| 147 |
+
category: 'contact' | 'transaction' | 'knowledge' | 'query';
|
|
|
|
|
|
|
| 148 |
}
|
| 149 |
|
| 150 |
class LocalVectorStore {
|
|
|
|
| 156 |
this.load();
|
| 157 |
}
|
| 158 |
|
| 159 |
+
add(id: string, text: string, vector: number[], metadata: Record<string, any>, category: VectorEntry['category']): void {
|
|
|
|
| 160 |
this.entries = this.entries.filter(e => e.id !== id);
|
| 161 |
+
this.entries.push({ id, text, vector, metadata, timestamp: Date.now(), category });
|
| 162 |
+
if (this.entries.length > 2000) this.entries = this.entries.slice(-2000);
|
| 163 |
this.save();
|
| 164 |
}
|
| 165 |
|
| 166 |
+
search(queryVector: number[], topK: number = 5, category?: VectorEntry['category']): RAGResult[] {
|
| 167 |
+
let candidates = this.entries;
|
| 168 |
+
if (category) candidates = candidates.filter(e => e.category === category);
|
| 169 |
+
if (candidates.length === 0) return [];
|
| 170 |
+
return candidates
|
| 171 |
+
.map(e => ({ text: e.text, score: cosine(queryVector, e.vector), metadata: e.metadata }))
|
|
|
|
|
|
|
|
|
|
|
|
|
| 172 |
.sort((a, b) => b.score - a.score)
|
| 173 |
.slice(0, topK);
|
| 174 |
}
|
| 175 |
|
| 176 |
+
getByCategory(category: VectorEntry['category']): VectorEntry[] {
|
| 177 |
+
return this.entries.filter(e => e.category === category);
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 178 |
}
|
| 179 |
|
| 180 |
+
private load() { try { if (fs.existsSync(this.storePath)) this.entries = JSON.parse(fs.readFileSync(this.storePath, 'utf8')); } catch { this.entries = []; } }
|
| 181 |
+
private save() { fs.writeFileSync(this.storePath, JSON.stringify(this.entries)); }
|
|
|
|
| 182 |
}
|
| 183 |
|
| 184 |
+
function cosine(a: number[], b: number[]): number {
|
| 185 |
+
if (a.length !== b.length) return 0;
|
| 186 |
+
let dot = 0, nA = 0, nB = 0;
|
| 187 |
+
for (let i = 0; i < a.length; i++) { dot += a[i]*b[i]; nA += a[i]*a[i]; nB += b[i]*b[i]; }
|
| 188 |
+
const d = Math.sqrt(nA) * Math.sqrt(nB);
|
| 189 |
+
return d === 0 ? 0 : dot / d;
|
| 190 |
+
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 191 |
|
| 192 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 193 |
+
// SYSTEM PROMPTS β These turn the LLM into a wallet agent
|
| 194 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 195 |
+
|
| 196 |
+
const AGENT_SYSTEM_PROMPT = `You are SolVox, an autonomous AI wallet agent running 100% locally on the user's device via QVAC SDK. You have direct access to wallet tools.
|
| 197 |
+
|
| 198 |
+
AVAILABLE TOOLS (call by returning JSON):
|
| 199 |
+
- check_balance: {} β returns SOL and USDT balance
|
| 200 |
+
- send_sol: {to: "address", amount: number} β sends SOL
|
| 201 |
+
- send_usdt: {to: "address", amount: number} β sends USDT
|
| 202 |
+
- get_history: {limit: number} β returns recent transactions
|
| 203 |
+
- resolve_contact: {query: "name or description"} β finds address from contact book
|
| 204 |
+
- get_address: {} β returns user's wallet address
|
| 205 |
+
- search_knowledge: {query: "question"} β searches transaction history and knowledge base
|
| 206 |
+
- confirm_transaction: {token, amount, to, toLabel} β asks user to confirm before sending
|
| 207 |
+
|
| 208 |
+
AGENT RULES:
|
| 209 |
+
1. ALWAYS call confirm_transaction before any send. Never send without confirmation.
|
| 210 |
+
2. If user says a name (e.g. "Alice"), call resolve_contact first to get the address.
|
| 211 |
+
3. If amount seems high relative to balance, flag it in your response.
|
| 212 |
+
4. For "check balance" β call check_balance and format the result conversationally.
|
| 213 |
+
5. For help/questions β use search_knowledge if relevant, then answer from context.
|
| 214 |
+
6. NEVER reveal private keys, mnemonics, or seed phrases.
|
| 215 |
+
7. Respond concisely. No filler.
|
| 216 |
+
|
| 217 |
+
Return a JSON object with:
|
| 218 |
+
{
|
| 219 |
+
"reasoning": "brief chain-of-thought",
|
| 220 |
+
"actions": [{"tool": "tool_name", "params": {...}}],
|
| 221 |
+
"response": "natural language response to user",
|
| 222 |
+
"requiresConfirmation": true/false
|
| 223 |
+
}`;
|
| 224 |
+
|
| 225 |
+
const RISK_ASSESSMENT_PROMPT = `Analyze this transaction for risk. You have the user's spending history below.
|
| 226 |
+
|
| 227 |
+
Transaction: {AMOUNT} {TOKEN} to {RECIPIENT}
|
| 228 |
+
User's average transaction: {AVG_AMOUNT} {TOKEN}
|
| 229 |
+
User's total today: {TODAY_TOTAL} {TOKEN}
|
| 230 |
+
User's daily average: {DAILY_AVG} {TOKEN}
|
| 231 |
+
Times sent to this address before: {TIMES_SENT}
|
| 232 |
+
Time of day: {HOUR}:00
|
| 233 |
+
|
| 234 |
+
Return JSON:
|
| 235 |
+
{
|
| 236 |
+
"score": 0-100 (0=safe, 100=dangerous),
|
| 237 |
+
"level": "safe"|"caution"|"warning"|"danger",
|
| 238 |
+
"factors": ["list of risk factors found"],
|
| 239 |
+
"recommendation": "one sentence advice"
|
| 240 |
+
}`;
|
| 241 |
+
|
| 242 |
+
const OCR_EXTRACTION_PROMPT = `Extract payment information from this OCR text. Return ONLY valid JSON.
|
| 243 |
+
|
| 244 |
+
OCR Text:
|
| 245 |
+
"""
|
| 246 |
+
{TEXT}
|
| 247 |
+
"""
|
| 248 |
+
|
| 249 |
+
Extract:
|
| 250 |
+
{
|
| 251 |
+
"amount": number or null,
|
| 252 |
+
"token": "SOL"|"USDT"|null,
|
| 253 |
+
"recipient": "Solana address if found" or null,
|
| 254 |
+
"memo": "any note/description" or null,
|
| 255 |
+
"confidence": 0-1
|
| 256 |
+
}`;
|
| 257 |
+
|
| 258 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 259 |
+
// QVAC ENGINE β The Brain
|
| 260 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 261 |
|
|
|
|
| 262 |
export class QVACEngine {
|
| 263 |
private qvac: any;
|
| 264 |
private vectorStore: LocalVectorStore;
|
| 265 |
+
private contacts: Map<string, Contact> = new Map();
|
| 266 |
+
private conversationHistory: Array<{ role: string; content: string }> = [];
|
| 267 |
+
private walletContext: { balance?: any; address?: string; history?: any[] } = {};
|
| 268 |
+
private status: AIStatus = { llm: false, embed: false, transcription: false, tts: false, translation: false, ocr: false, initialized: false };
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 269 |
|
| 270 |
constructor() {
|
| 271 |
this.qvac = new QVAC();
|
| 272 |
+
const ud = app?.getPath('userData') ?? '/tmp/solvox';
|
| 273 |
+
this.vectorStore = new LocalVectorStore(path.join(ud, 'vector-store.json'));
|
| 274 |
+
this.loadContacts(ud);
|
| 275 |
+
}
|
| 276 |
+
|
| 277 |
+
// βββ Initialization ββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 278 |
+
|
| 279 |
+
async initialize(): Promise<void> {
|
| 280 |
+
console.log('[QVAC] Initializing deep AI engine...');
|
| 281 |
+
const md = this.getModelsDir();
|
| 282 |
+
|
| 283 |
+
this.qvac.use(new LLMLlamacpp()).use(new EmbedLlamacpp())
|
| 284 |
+
.use(new TranscriptionWhispercpp()).use(new TTSOnnx())
|
| 285 |
+
.use(new TranslationNmtcpp()).use(new OCROnnx());
|
| 286 |
+
|
| 287 |
+
const loads: Promise<void>[] = [];
|
| 288 |
+
const tryLoad = (name: string, key: keyof AIStatus, fn: () => Promise<void>) => {
|
| 289 |
+
loads.push(fn().then(() => { (this.status as any)[key] = true; console.log(`[QVAC] β ${name}`); })
|
| 290 |
+
.catch((e: Error) => console.warn(`[QVAC] β ${name}: ${e.message}`)));
|
| 291 |
+
};
|
| 292 |
+
|
| 293 |
+
const p = (f: string) => path.join(md, f);
|
| 294 |
+
if (fs.existsSync(p('llama-3.2-3b-instruct-q4_k_m.gguf')))
|
| 295 |
+
tryLoad('LLM', 'llm', () => this.qvac.llm.load(p('llama-3.2-3b-instruct-q4_k_m.gguf'), { contextSize: 4096, nGpuLayers: 32 }));
|
| 296 |
+
if (fs.existsSync(p('nomic-embed-text-v1.5.Q4_K_M.gguf')))
|
| 297 |
+
tryLoad('Embeddings', 'embed', () => this.qvac.embed.load(p('nomic-embed-text-v1.5.Q4_K_M.gguf')));
|
| 298 |
+
if (fs.existsSync(p('ggml-base.en.bin')))
|
| 299 |
+
tryLoad('STT', 'transcription', () => this.qvac.transcription.load(p('ggml-base.en.bin'), { language: 'en' }));
|
| 300 |
+
if (fs.existsSync(p('en_US-amy-medium.onnx')))
|
| 301 |
+
tryLoad('TTS', 'tts', () => this.qvac.tts.load(p('en_US-amy-medium.onnx'), { sampleRate: 22050 }));
|
| 302 |
+
if (fs.existsSync(p('translate-en-es.bin')))
|
| 303 |
+
tryLoad('Translation', 'translation', () => this.qvac.translation.load(p('translate-en-es.bin')));
|
| 304 |
+
if (fs.existsSync(p('ppocr-v4.onnx')))
|
| 305 |
+
tryLoad('OCR', 'ocr', () => this.qvac.ocr.load(p('ppocr-v4.onnx')));
|
| 306 |
+
|
| 307 |
+
await Promise.allSettled(loads);
|
| 308 |
+
this.status.initialized = true;
|
| 309 |
+
console.log('[QVAC] Deep engine ready:', this.status);
|
| 310 |
}
|
| 311 |
|
| 312 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 313 |
+
// 1. TOOL-USE AGENT β The core integration
|
| 314 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 315 |
+
|
| 316 |
/**
|
| 317 |
+
* The LLM agent processes user input, reasons about what tools to call,
|
| 318 |
+
* and generates a structured action plan. This is the primary QVAC
|
| 319 |
+
* integration point β the LLM drives ALL wallet operations.
|
| 320 |
*/
|
| 321 |
+
async runAgent(userMessage: string, walletContext?: any): Promise<AgentResult> {
|
| 322 |
+
const steps: PipelineStep[] = [];
|
| 323 |
+
if (walletContext) this.walletContext = walletContext;
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 324 |
|
| 325 |
+
// Enrich with RAG context (embeddings)
|
| 326 |
+
let ragContext = '';
|
| 327 |
+
if (this.status.embed) {
|
| 328 |
+
const t0 = Date.now();
|
| 329 |
+
const results = await this.semanticSearch(userMessage, 3);
|
| 330 |
+
steps.push({ module: '@qvac/embed-llamacpp', operation: 'RAG context retrieval', input: userMessage.slice(0, 50), output: `${results.length} results`, durationMs: Date.now() - t0 });
|
| 331 |
+
if (results.length > 0) {
|
| 332 |
+
ragContext = '\n\nRELEVANT CONTEXT FROM USER HISTORY:\n' +
|
| 333 |
+
results.map(r => `- [${(r.score * 100).toFixed(0)}% match] ${r.text}`).join('\n');
|
| 334 |
+
}
|
| 335 |
}
|
| 336 |
|
| 337 |
+
// Build conversation context
|
| 338 |
+
const systemMsg = AGENT_SYSTEM_PROMPT +
|
| 339 |
+
(this.walletContext.balance ? `\n\nCURRENT BALANCE: ${this.walletContext.balance.sol} SOL, ${this.walletContext.balance.usdt} USDT` : '') +
|
| 340 |
+
(this.walletContext.address ? `\nWALLET ADDRESS: ${this.walletContext.address}` : '') +
|
| 341 |
+
ragContext;
|
| 342 |
+
|
| 343 |
+
// Add to conversation history (keep last 6 turns)
|
| 344 |
+
this.conversationHistory.push({ role: 'user', content: userMessage });
|
| 345 |
+
if (this.conversationHistory.length > 12) this.conversationHistory = this.conversationHistory.slice(-12);
|
| 346 |
+
|
| 347 |
+
if (!this.status.llm) {
|
| 348 |
+
// Fallback: regex agent
|
| 349 |
+
return this.fallbackAgent(userMessage, steps);
|
| 350 |
}
|
| 351 |
|
| 352 |
+
// Run LLM agent
|
| 353 |
+
const t1 = Date.now();
|
| 354 |
+
const messages = [
|
| 355 |
+
{ role: 'system', content: systemMsg },
|
| 356 |
+
...this.conversationHistory,
|
| 357 |
+
];
|
| 358 |
+
|
| 359 |
+
let llmResponse: string;
|
| 360 |
+
try {
|
| 361 |
+
llmResponse = await this.qvac.llm.chat(messages, { maxTokens: 512, temperature: 0.2 });
|
| 362 |
+
} catch (e: any) {
|
| 363 |
+
return this.fallbackAgent(userMessage, steps);
|
| 364 |
}
|
| 365 |
+
steps.push({ module: '@qvac/llm-llamacpp', operation: 'Agent reasoning + tool selection', input: userMessage.slice(0, 50), output: llmResponse.slice(0, 100), durationMs: Date.now() - t1 });
|
| 366 |
|
| 367 |
+
// Parse agent response
|
| 368 |
+
let parsed: any;
|
| 369 |
+
try {
|
| 370 |
+
const jsonMatch = llmResponse.match(/\{[\s\S]*\}/);
|
| 371 |
+
parsed = jsonMatch ? JSON.parse(jsonMatch[0]) : null;
|
| 372 |
+
} catch { parsed = null; }
|
| 373 |
+
|
| 374 |
+
if (!parsed) {
|
| 375 |
+
// LLM returned freeform text, wrap it
|
| 376 |
+
this.conversationHistory.push({ role: 'assistant', content: llmResponse });
|
| 377 |
+
return { actions: [], response: llmResponse, requiresConfirmation: false, pipelineSteps: steps };
|
| 378 |
}
|
| 379 |
|
| 380 |
+
const actions: AgentAction[] = (parsed.actions || []).map((a: any) => ({
|
| 381 |
+
tool: a.tool || 'none', params: a.params || {}, reasoning: parsed.reasoning || '',
|
| 382 |
+
}));
|
| 383 |
+
|
| 384 |
+
// Extract pending transaction from actions
|
| 385 |
+
let pendingTx: AgentResult['pendingTransaction'];
|
| 386 |
+
const confirmAction = actions.find(a => a.tool === 'confirm_transaction');
|
| 387 |
+
if (confirmAction) {
|
| 388 |
+
pendingTx = {
|
| 389 |
+
token: confirmAction.params.token || 'SOL',
|
| 390 |
+
amount: confirmAction.params.amount || 0,
|
| 391 |
+
to: confirmAction.params.to || '',
|
| 392 |
+
toLabel: confirmAction.params.toLabel,
|
| 393 |
+
};
|
| 394 |
+
|
| 395 |
+
// Run AI risk assessment on the pending transaction
|
| 396 |
+
if (pendingTx.amount > 0 && this.status.llm) {
|
| 397 |
+
pendingTx.riskAssessment = await this.assessTransactionRisk(pendingTx.amount, pendingTx.token, pendingTx.to);
|
| 398 |
+
steps.push({ module: '@qvac/llm-llamacpp', operation: 'AI risk assessment', input: `${pendingTx.amount} ${pendingTx.token}`, output: `Risk: ${pendingTx.riskAssessment.level}`, durationMs: 0 });
|
| 399 |
+
}
|
| 400 |
}
|
| 401 |
|
| 402 |
+
this.conversationHistory.push({ role: 'assistant', content: parsed.response || llmResponse });
|
| 403 |
+
|
| 404 |
+
return {
|
| 405 |
+
actions,
|
| 406 |
+
response: parsed.response || llmResponse,
|
| 407 |
+
requiresConfirmation: parsed.requiresConfirmation ?? !!confirmAction,
|
| 408 |
+
pendingTransaction: pendingTx,
|
| 409 |
+
pipelineSteps: steps,
|
| 410 |
+
};
|
| 411 |
}
|
| 412 |
|
| 413 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 414 |
+
// 2. SEMANTIC CONTACT RESOLUTION (@qvac/embed-llamacpp)
|
| 415 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 416 |
+
|
| 417 |
/**
|
| 418 |
+
* Resolve a name/description to a Solana address using semantic search.
|
| 419 |
+
* "Send to Alice" β finds Alice's embedded contact β returns her address.
|
| 420 |
+
* This is a real use of embeddings, not just a database lookup.
|
| 421 |
*/
|
| 422 |
+
async resolveContact(query: string): Promise<{ address: string; name: string; confidence: number } | null> {
|
| 423 |
+
if (!this.status.embed) {
|
| 424 |
+
// Exact match fallback
|
| 425 |
+
for (const [_, contact] of this.contacts) {
|
| 426 |
+
if (contact.name.toLowerCase().includes(query.toLowerCase())) {
|
| 427 |
+
return { address: contact.address, name: contact.name, confidence: 0.9 };
|
| 428 |
+
}
|
| 429 |
+
}
|
| 430 |
+
return null;
|
|
|
|
|
|
|
|
|
|
| 431 |
}
|
| 432 |
|
| 433 |
+
const qVec = await this.qvac.embed.embed(query);
|
| 434 |
+
const results = this.vectorStore.search(qVec, 1, 'contact');
|
| 435 |
+
if (results.length > 0 && results[0].score > 0.5) {
|
| 436 |
+
return {
|
| 437 |
+
address: results[0].metadata.address,
|
| 438 |
+
name: results[0].metadata.name,
|
| 439 |
+
confidence: results[0].score,
|
| 440 |
+
};
|
| 441 |
+
}
|
| 442 |
+
return null;
|
| 443 |
+
}
|
| 444 |
|
| 445 |
+
/**
|
| 446 |
+
* Add a contact to the semantic contact book.
|
| 447 |
+
* The contact's name, address, and notes are all embedded together.
|
| 448 |
+
*/
|
| 449 |
+
async addContact(contact: Contact): Promise<void> {
|
| 450 |
+
this.contacts.set(contact.address, contact);
|
| 451 |
+
this.saveContacts();
|
| 452 |
|
| 453 |
+
if (this.status.embed) {
|
| 454 |
+
const text = `Contact: ${contact.name}. Address: ${contact.address}. ${contact.notes || ''}`;
|
| 455 |
+
const vec = await this.qvac.embed.embed(text);
|
| 456 |
+
this.vectorStore.add(`contact_${contact.address}`, text, vec, { address: contact.address, name: contact.name }, 'contact');
|
|
|
|
|
|
|
|
|
|
|
|
|
| 457 |
}
|
|
|
|
|
|
|
| 458 |
}
|
| 459 |
|
| 460 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 461 |
+
// 3. AI-POWERED TRANSACTION SECURITY (@qvac/llm + @qvac/embed)
|
| 462 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 463 |
+
|
| 464 |
/**
|
| 465 |
+
* The LLM analyzes a pending transaction against the user's embedded
|
| 466 |
+
* spending patterns and generates a risk assessment. This replaces
|
| 467 |
+
* simple rule-based checks with genuine AI reasoning.
|
| 468 |
*/
|
| 469 |
+
async assessTransactionRisk(amount: number, token: string, recipient: string): Promise<RiskAssessment> {
|
| 470 |
+
// Gather spending context from embedded transaction history
|
| 471 |
+
const txEntries = this.vectorStore.getByCategory('transaction');
|
| 472 |
+
const tokenTxs = txEntries.filter(e => e.metadata.token === token);
|
| 473 |
+
const avgAmount = tokenTxs.length > 0 ? tokenTxs.reduce((s, e) => s + (e.metadata.amount || 0), 0) / tokenTxs.length : 0;
|
| 474 |
+
|
| 475 |
+
const todayStart = new Date(); todayStart.setHours(0, 0, 0, 0);
|
| 476 |
+
const todayTotal = tokenTxs.filter(e => e.timestamp >= todayStart.getTime()).reduce((s, e) => s + (e.metadata.amount || 0), 0);
|
| 477 |
+
|
| 478 |
+
const dayCount = txEntries.length > 0 ? Math.max(1, (Date.now() - txEntries[0].timestamp) / 86400000) : 1;
|
| 479 |
+
const dailyAvg = tokenTxs.reduce((s, e) => s + (e.metadata.amount || 0), 0) / dayCount;
|
| 480 |
+
const timesSent = txEntries.filter(e => e.metadata.to === recipient).length;
|
| 481 |
+
|
| 482 |
if (!this.status.llm) {
|
| 483 |
+
// Rule-based fallback
|
| 484 |
+
const score = (amount > avgAmount * 5 ? 30 : 0) + (timesSent === 0 ? 20 : 0) + (todayTotal + amount > dailyAvg * 3 ? 25 : 0);
|
| 485 |
+
return {
|
| 486 |
+
score: Math.min(100, score),
|
| 487 |
+
level: score >= 60 ? 'danger' : score >= 40 ? 'warning' : score >= 20 ? 'caution' : 'safe',
|
| 488 |
+
factors: [
|
| 489 |
+
...(amount > avgAmount * 5 ? [`Amount is ${(amount / Math.max(avgAmount, 0.01)).toFixed(1)}x your average`] : []),
|
| 490 |
+
...(timesSent === 0 ? ['First time sending to this address'] : []),
|
| 491 |
+
...(todayTotal + amount > dailyAvg * 3 ? ['Daily volume unusually high'] : []),
|
| 492 |
+
],
|
| 493 |
+
recommendation: score >= 40 ? 'Double-check the recipient address and amount.' : 'Transaction looks normal.',
|
| 494 |
+
};
|
| 495 |
}
|
| 496 |
|
| 497 |
+
// LLM risk analysis
|
| 498 |
+
const prompt = RISK_ASSESSMENT_PROMPT
|
| 499 |
+
.replace('{AMOUNT}', amount.toString()).replace('{TOKEN}', token)
|
| 500 |
+
.replace('{RECIPIENT}', recipient.slice(0, 8) + '...')
|
| 501 |
+
.replace('{AVG_AMOUNT}', avgAmount.toFixed(2)).replace('{TODAY_TOTAL}', todayTotal.toFixed(2))
|
| 502 |
+
.replace('{DAILY_AVG}', dailyAvg.toFixed(2)).replace('{TIMES_SENT}', timesSent.toString())
|
| 503 |
+
.replace('{HOUR}', new Date().getHours().toString());
|
| 504 |
+
|
| 505 |
try {
|
| 506 |
+
const resp = await this.qvac.llm.chat([
|
| 507 |
+
{ role: 'system', content: 'You are a transaction risk analyzer. Return ONLY valid JSON.' },
|
|
|
|
| 508 |
{ role: 'user', content: prompt },
|
| 509 |
+
], { maxTokens: 256, temperature: 0.1 });
|
| 510 |
+
|
| 511 |
+
const match = resp.match(/\{[\s\S]*\}/);
|
| 512 |
+
if (match) {
|
| 513 |
+
const parsed = JSON.parse(match[0]);
|
|
|
|
|
|
|
|
|
|
|
|
|
| 514 |
return {
|
| 515 |
+
score: Math.min(100, Math.max(0, parsed.score || 0)),
|
| 516 |
+
level: parsed.level || 'safe',
|
| 517 |
+
factors: parsed.factors || [],
|
| 518 |
+
recommendation: parsed.recommendation || '',
|
|
|
|
|
|
|
|
|
|
| 519 |
};
|
| 520 |
}
|
| 521 |
+
} catch {}
|
|
|
|
|
|
|
| 522 |
|
| 523 |
+
return { score: 0, level: 'safe', factors: [], recommendation: 'Unable to assess risk.' };
|
| 524 |
}
|
| 525 |
|
| 526 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 527 |
+
// 4. VOICE AGENT β Full 6-module pipeline
|
| 528 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 529 |
+
|
| 530 |
/**
|
| 531 |
+
* Complete voice pipeline using ALL 6 QVAC modules:
|
| 532 |
+
*
|
| 533 |
+
* π€ Audio β [transcription-whispercpp] β text
|
| 534 |
+
* β [translation-nmtcpp] β English (if non-English)
|
| 535 |
+
* β [llm-llamacpp] β agent reasoning + tool calls
|
| 536 |
+
* β [embed-llamacpp] β RAG context + contact resolution
|
| 537 |
+
* β [translation-nmtcpp] β user's language (if translated)
|
| 538 |
+
* β [tts-onnx] β spoken response
|
| 539 |
+
*
|
| 540 |
+
* This is the deepest QVAC integration: one voice command can
|
| 541 |
+
* trigger all 6 packages in a single processing chain.
|
| 542 |
*/
|
| 543 |
+
async processVoiceCommand(audioBuffer: Buffer, walletContext?: any): Promise<VoiceResult> {
|
| 544 |
+
const steps: PipelineStep[] = [];
|
| 545 |
+
|
| 546 |
+
// ββ Step 1: Transcription (@qvac/transcription-whispercpp) ββ
|
| 547 |
+
if (!this.status.transcription) throw new Error('Speech-to-text not available');
|
| 548 |
+
const t0 = Date.now();
|
| 549 |
+
const transcription = await this.qvac.transcription.transcribe(audioBuffer);
|
| 550 |
+
steps.push({ module: '@qvac/transcription-whispercpp', operation: 'Speech β Text', input: `${(audioBuffer.length / 1024).toFixed(0)}KB audio`, output: transcription.slice(0, 60), durationMs: Date.now() - t0 });
|
| 551 |
+
|
| 552 |
+
// ββ Step 2: Language detection + translation (@qvac/translation-nmtcpp) ββ
|
| 553 |
+
let processText = transcription;
|
| 554 |
+
let detectedLang: string | undefined;
|
| 555 |
+
let translatedText: string | undefined;
|
| 556 |
+
|
| 557 |
+
if (this.status.translation && this.looksNonEnglish(transcription)) {
|
| 558 |
+
const t1 = Date.now();
|
| 559 |
+
try {
|
| 560 |
+
translatedText = await this.qvac.translation.translate(transcription, { to: 'en' });
|
| 561 |
+
processText = translatedText;
|
| 562 |
+
detectedLang = 'auto';
|
| 563 |
+
steps.push({ module: '@qvac/translation-nmtcpp', operation: 'Translate β English', input: transcription.slice(0, 40), output: translatedText.slice(0, 40), durationMs: Date.now() - t1 });
|
| 564 |
+
} catch { /* keep original */ }
|
|
|
|
|
|
|
|
|
|
| 565 |
}
|
| 566 |
|
| 567 |
+
// ββ Step 3: Agent processing (@qvac/llm-llamacpp + @qvac/embed-llamacpp) ββ
|
| 568 |
+
const agentResult = await this.runAgent(processText, walletContext);
|
| 569 |
+
steps.push(...agentResult.pipelineSteps);
|
|
|
|
| 570 |
|
| 571 |
+
// ββ Step 4: Translate response back (@qvac/translation-nmtcpp) ββ
|
| 572 |
+
let finalResponse = agentResult.response;
|
| 573 |
+
if (detectedLang && this.status.translation) {
|
| 574 |
+
const t2 = Date.now();
|
| 575 |
+
try {
|
| 576 |
+
finalResponse = await this.qvac.translation.translate(agentResult.response, { from: 'en' });
|
| 577 |
+
steps.push({ module: '@qvac/translation-nmtcpp', operation: 'Translate response β user language', input: agentResult.response.slice(0, 40), output: finalResponse.slice(0, 40), durationMs: Date.now() - t2 });
|
| 578 |
+
} catch { /* keep English */ }
|
| 579 |
}
|
| 580 |
|
| 581 |
+
// ββ Step 5: Speech synthesis (@qvac/tts-onnx) ββ
|
| 582 |
+
let responseAudio: Buffer | undefined;
|
| 583 |
+
if (this.status.tts) {
|
| 584 |
+
const t3 = Date.now();
|
| 585 |
+
try {
|
| 586 |
+
responseAudio = await this.qvac.tts.synthesize(finalResponse);
|
| 587 |
+
steps.push({ module: '@qvac/tts-onnx', operation: 'Text β Speech', input: finalResponse.slice(0, 40), output: `${(responseAudio.length / 1024).toFixed(0)}KB audio`, durationMs: Date.now() - t3 });
|
| 588 |
+
} catch {}
|
| 589 |
}
|
| 590 |
|
| 591 |
+
return { transcription, detectedLanguage: detectedLang, translatedText, agentResult: { ...agentResult, response: finalResponse }, responseAudio, pipelineSteps: steps };
|
| 592 |
}
|
| 593 |
|
| 594 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 595 |
+
// 5. DOCUMENT-TO-PAYMENT (@qvac/ocr-onnx β @qvac/llm-llamacpp)
|
| 596 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 597 |
+
|
| 598 |
/**
|
| 599 |
+
* Takes an image (invoice, QR code, screenshot), extracts text via OCR,
|
| 600 |
+
* then uses the LLM to parse the unstructured text into structured
|
| 601 |
+
* payment data. Two QVAC packages working in series.
|
| 602 |
*/
|
| 603 |
+
async processDocumentToPayment(imageBuffer: Buffer): Promise<OCRPaymentResult> {
|
| 604 |
+
const steps: PipelineStep[] = [];
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 605 |
|
| 606 |
+
// ββ Step 1: OCR extraction (@qvac/ocr-onnx) ββ
|
| 607 |
+
if (!this.status.ocr) throw new Error('OCR not available');
|
| 608 |
+
const t0 = Date.now();
|
| 609 |
+
const rawText = await this.qvac.ocr.recognize(imageBuffer, { format: 'text' });
|
| 610 |
+
steps.push({ module: '@qvac/ocr-onnx', operation: 'Image β Text (OCR)', input: `${(imageBuffer.length / 1024).toFixed(0)}KB image`, output: rawText.slice(0, 60), durationMs: Date.now() - t0 });
|
| 611 |
+
|
| 612 |
+
// ββ Step 2: LLM structured extraction (@qvac/llm-llamacpp) ββ
|
| 613 |
+
let extractedData = { amount: undefined as number | undefined, token: undefined as string | undefined, recipient: undefined as string | undefined, memo: undefined as string | undefined, confidence: 0 };
|
|
|
|
|
|
|
| 614 |
|
| 615 |
+
if (this.status.llm) {
|
| 616 |
+
const t1 = Date.now();
|
| 617 |
+
const prompt = OCR_EXTRACTION_PROMPT.replace('{TEXT}', rawText);
|
| 618 |
+
try {
|
| 619 |
+
const resp = await this.qvac.llm.chat([
|
| 620 |
+
{ role: 'system', content: 'Extract payment data from OCR text. Return ONLY JSON.' },
|
| 621 |
+
{ role: 'user', content: prompt },
|
| 622 |
+
], { maxTokens: 256, temperature: 0.1 });
|
| 623 |
+
|
| 624 |
+
const match = resp.match(/\{[\s\S]*\}/);
|
| 625 |
+
if (match) extractedData = JSON.parse(match[0]);
|
| 626 |
+
steps.push({ module: '@qvac/llm-llamacpp', operation: 'Parse payment data from OCR', input: rawText.slice(0, 40), output: JSON.stringify(extractedData).slice(0, 60), durationMs: Date.now() - t1 });
|
| 627 |
+
} catch {}
|
| 628 |
}
|
| 629 |
+
|
| 630 |
+
return { rawText, extractedData, pipelineSteps: steps };
|
| 631 |
}
|
| 632 |
|
| 633 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 634 |
+
// 6. TRANSACTION AUTO-INDEX (@qvac/embed-llamacpp)
|
| 635 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 636 |
+
|
| 637 |
/**
|
| 638 |
+
* Every completed transaction is automatically embedded and indexed
|
| 639 |
+
* for future RAG retrieval. This means the AI's knowledge of your
|
| 640 |
+
* wallet grows with every transaction β entirely local.
|
| 641 |
*/
|
| 642 |
+
async indexTransaction(tx: { signature: string; amount: number; token: string; to: string; toLabel?: string; timestamp: number }): Promise<void> {
|
| 643 |
+
if (!this.status.embed) return;
|
| 644 |
+
const text = `Sent ${tx.amount} ${tx.token} to ${tx.toLabel || tx.to.slice(0, 8)} on ${new Date(tx.timestamp).toLocaleDateString()}. TX: ${tx.signature.slice(0, 12)}`;
|
| 645 |
+
try {
|
| 646 |
+
const vec = await this.qvac.embed.embed(text);
|
| 647 |
+
this.vectorStore.add(`tx_${tx.signature}`, text, vec, { amount: tx.amount, token: tx.token, to: tx.to, toLabel: tx.toLabel, signature: tx.signature, timestamp: tx.timestamp }, 'transaction');
|
| 648 |
+
} catch {}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 649 |
}
|
| 650 |
|
| 651 |
/**
|
| 652 |
+
* Index knowledge (Solana concepts, DeFi terms, etc.) for the AI to reference.
|
| 653 |
*/
|
| 654 |
+
async indexKnowledge(text: string, metadata: Record<string, any> = {}): Promise<void> {
|
| 655 |
+
if (!this.status.embed) return;
|
| 656 |
+
try {
|
| 657 |
+
const vec = await this.qvac.embed.embed(text);
|
| 658 |
+
this.vectorStore.add(`kb_${Date.now()}`, text, vec, metadata, 'knowledge');
|
| 659 |
+
} catch {}
|
| 660 |
+
}
|
| 661 |
+
|
| 662 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 663 |
+
// PUBLIC API (for IPC handlers)
|
| 664 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 665 |
+
|
| 666 |
+
async chat(message: string, walletContext?: any): Promise<AgentResult> {
|
| 667 |
+
return this.runAgent(message, walletContext);
|
| 668 |
+
}
|
| 669 |
+
|
| 670 |
+
async semanticSearch(query: string, topK: number = 5, category?: string): Promise<RAGResult[]> {
|
| 671 |
+
if (!this.status.embed) return [];
|
| 672 |
+
try {
|
| 673 |
+
const vec = await this.qvac.embed.embed(query);
|
| 674 |
+
return this.vectorStore.search(vec, topK, category as any);
|
| 675 |
+
} catch { return []; }
|
| 676 |
+
}
|
| 677 |
+
|
| 678 |
async speak(text: string): Promise<Buffer> {
|
| 679 |
+
if (!this.status.tts) throw new Error('TTS not available');
|
|
|
|
|
|
|
| 680 |
return this.qvac.tts.synthesize(text);
|
| 681 |
}
|
| 682 |
|
|
|
|
|
|
|
|
|
|
| 683 |
async translate(text: string, from: string, to: string): Promise<string> {
|
| 684 |
+
if (!this.status.translation) throw new Error('Translation not available');
|
|
|
|
|
|
|
| 685 |
return this.qvac.translation.translate(text, { from, to });
|
| 686 |
}
|
| 687 |
|
|
|
|
|
|
|
|
|
|
| 688 |
async embed(text: string): Promise<number[]> {
|
| 689 |
+
if (!this.status.embed) throw new Error('Embeddings not available');
|
|
|
|
|
|
|
| 690 |
return this.qvac.embed.embed(text);
|
| 691 |
}
|
| 692 |
|
|
|
|
|
|
|
|
|
|
| 693 |
async ocr(imageBuffer: Buffer): Promise<string> {
|
| 694 |
+
if (!this.status.ocr) throw new Error('OCR not available');
|
|
|
|
|
|
|
| 695 |
return this.qvac.ocr.recognize(imageBuffer, { format: 'text' });
|
| 696 |
}
|
| 697 |
|
| 698 |
+
getStatus(): AIStatus { return { ...this.status }; }
|
| 699 |
+
getContacts(): Contact[] { return Array.from(this.contacts.values()); }
|
|
|
|
|
|
|
|
|
|
| 700 |
|
| 701 |
+
shutdown(): void {
|
| 702 |
+
this.status = { llm: false, embed: false, transcription: false, tts: false, translation: false, ocr: false, initialized: false };
|
|
|
|
|
|
|
|
|
|
|
|
|
| 703 |
}
|
| 704 |
|
| 705 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 706 |
+
// PRIVATE HELPERS
|
| 707 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
|
|
|
|
|
|
| 708 |
|
| 709 |
+
private looksNonEnglish(text: string): boolean {
|
| 710 |
+
// Simple heuristic: if >30% non-ASCII, likely non-English
|
| 711 |
+
const nonAscii = text.split('').filter(c => c.charCodeAt(0) > 127).length;
|
| 712 |
+
return nonAscii / Math.max(text.length, 1) > 0.3;
|
|
|
|
|
|
|
|
|
|
| 713 |
}
|
| 714 |
|
| 715 |
+
private fallbackAgent(message: string, steps: PipelineStep[]): AgentResult {
|
| 716 |
+
const lower = message.toLowerCase();
|
| 717 |
+
const actions: AgentAction[] = [];
|
| 718 |
+
let response = "I didn't understand that. Try 'send 5 SOL to Alice' or 'check my balance'.";
|
| 719 |
+
let requiresConfirmation = false;
|
| 720 |
+
|
| 721 |
+
const sendMatch = lower.match(/(?:send|transfer|pay)\s+(\d+(?:\.\d+)?)\s*(sol|usdt)?\s*(?:to\s+)?(.+)?/i);
|
| 722 |
+
if (sendMatch) {
|
| 723 |
+
const amount = parseFloat(sendMatch[1]);
|
| 724 |
+
const token = (sendMatch[2] || 'SOL').toUpperCase();
|
| 725 |
+
const to = sendMatch[3]?.trim() || '';
|
| 726 |
+
actions.push({ tool: 'confirm_transaction', params: { amount, token, to, toLabel: to }, reasoning: 'Regex fallback parsed send command' });
|
| 727 |
+
response = `Send ${amount} ${token} to ${to}? Please confirm.`;
|
| 728 |
+
requiresConfirmation = true;
|
| 729 |
+
} else if (/balance|how much|funds/.test(lower)) {
|
| 730 |
+
actions.push({ tool: 'check_balance', params: {}, reasoning: 'Balance inquiry' });
|
| 731 |
+
response = 'Checking your balance...';
|
| 732 |
+
} else if (/history|transactions?|recent/.test(lower)) {
|
| 733 |
+
actions.push({ tool: 'get_history', params: { limit: 5 }, reasoning: 'History request' });
|
| 734 |
+
response = 'Loading recent transactions...';
|
| 735 |
+
} else if (/address|receive|deposit/.test(lower)) {
|
| 736 |
+
actions.push({ tool: 'get_address', params: {}, reasoning: 'Address request' });
|
| 737 |
+
response = 'Here is your wallet address.';
|
| 738 |
+
}
|
| 739 |
+
|
| 740 |
+
return { actions, response, requiresConfirmation, pipelineSteps: steps };
|
| 741 |
}
|
| 742 |
|
| 743 |
+
private loadContacts(userDataPath: string): void {
|
| 744 |
+
try {
|
| 745 |
+
const p = path.join(userDataPath, 'contacts.json');
|
| 746 |
+
if (fs.existsSync(p)) {
|
| 747 |
+
const data: Contact[] = JSON.parse(fs.readFileSync(p, 'utf8'));
|
| 748 |
+
data.forEach(c => this.contacts.set(c.address, c));
|
| 749 |
+
}
|
| 750 |
+
} catch {}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 751 |
}
|
| 752 |
|
| 753 |
+
private saveContacts(): void {
|
| 754 |
+
try {
|
| 755 |
+
const p = path.join(app?.getPath('userData') ?? '/tmp/solvox', 'contacts.json');
|
| 756 |
+
fs.writeFileSync(p, JSON.stringify(Array.from(this.contacts.values())));
|
| 757 |
+
} catch {}
|
| 758 |
+
}
|
| 759 |
|
| 760 |
private getModelsDir(): string {
|
| 761 |
+
return app?.isPackaged ? path.join(process.resourcesPath, 'models') : path.join(__dirname, '../../..', 'models');
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 762 |
}
|
| 763 |
}
|
src/main/main.ts
CHANGED
|
@@ -291,100 +291,220 @@ function registerIPCHandlers(): void {
|
|
| 291 |
return transactionGuard!.getAnomalyLog();
|
| 292 |
});
|
| 293 |
|
| 294 |
-
//
|
| 295 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 296 |
try {
|
| 297 |
-
await
|
| 298 |
-
|
| 299 |
-
|
| 300 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 301 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 302 |
});
|
| 303 |
|
| 304 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 305 |
try {
|
| 306 |
-
const
|
| 307 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 308 |
} catch (error: any) {
|
| 309 |
return { success: false, error: error.message };
|
| 310 |
}
|
| 311 |
});
|
| 312 |
|
| 313 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 314 |
try {
|
| 315 |
-
const
|
| 316 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 317 |
} catch (error: any) {
|
| 318 |
return { success: false, error: error.message };
|
| 319 |
}
|
| 320 |
});
|
| 321 |
|
| 322 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 323 |
try {
|
| 324 |
-
|
| 325 |
-
return { success:
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 326 |
} catch (error: any) {
|
| 327 |
return { success: false, error: error.message };
|
| 328 |
}
|
| 329 |
});
|
| 330 |
|
| 331 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 332 |
try {
|
| 333 |
-
const
|
| 334 |
-
return { success: true,
|
| 335 |
} catch (error: any) {
|
| 336 |
return { success: false, error: error.message };
|
| 337 |
}
|
| 338 |
});
|
| 339 |
|
| 340 |
-
|
|
|
|
|
|
|
|
|
|
| 341 |
try {
|
| 342 |
-
const
|
| 343 |
-
return { success: true,
|
| 344 |
} catch (error: any) {
|
| 345 |
return { success: false, error: error.message };
|
| 346 |
}
|
| 347 |
});
|
| 348 |
|
| 349 |
-
|
|
|
|
|
|
|
|
|
|
| 350 |
try {
|
| 351 |
-
const
|
| 352 |
-
return { success: true,
|
| 353 |
} catch (error: any) {
|
| 354 |
return { success: false, error: error.message };
|
| 355 |
}
|
| 356 |
});
|
| 357 |
|
| 358 |
-
ipcMain.handle('ai:
|
| 359 |
try {
|
| 360 |
-
|
| 361 |
-
return { success: true
|
| 362 |
} catch (error: any) {
|
| 363 |
return { success: false, error: error.message };
|
| 364 |
}
|
| 365 |
});
|
| 366 |
|
| 367 |
-
ipcMain.handle('ai:
|
| 368 |
-
return qvacEngine!.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 369 |
});
|
| 370 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 371 |
// ββ RAG / Semantic Search ββ
|
| 372 |
-
ipcMain.handle('rag:search', async (_, query: string) => {
|
| 373 |
-
try {
|
| 374 |
-
|
| 375 |
-
return { success: true, results };
|
| 376 |
-
} catch (error: any) {
|
| 377 |
-
return { success: false, error: error.message };
|
| 378 |
-
}
|
| 379 |
});
|
| 380 |
|
| 381 |
-
ipcMain.handle('rag:
|
| 382 |
-
try {
|
| 383 |
-
|
| 384 |
-
return { success: true };
|
| 385 |
-
} catch (error: any) {
|
| 386 |
-
return { success: false, error: error.message };
|
| 387 |
-
}
|
| 388 |
});
|
| 389 |
}
|
| 390 |
|
|
|
|
| 291 |
return transactionGuard!.getAnomalyLog();
|
| 292 |
});
|
| 293 |
|
| 294 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 295 |
+
// AI / QVAC Operations β Deep Integration
|
| 296 |
+
// The AI agent can directly execute wallet operations through tool
|
| 297 |
+
// calls. This is the core QVAC integration: the LLM drives the wallet.
|
| 298 |
+
// βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 299 |
+
|
| 300 |
+
/** Helper: build wallet context for the agent */
|
| 301 |
+
async function getWalletContext() {
|
| 302 |
try {
|
| 303 |
+
const balance = await walletService!.getBalance();
|
| 304 |
+
const address = walletService!.getPublicKey();
|
| 305 |
+
return { balance, address };
|
| 306 |
+
} catch { return {}; }
|
| 307 |
+
}
|
| 308 |
+
|
| 309 |
+
/**
|
| 310 |
+
* Execute agent tool calls against the actual wallet.
|
| 311 |
+
* The LLM decides WHAT to do; this function DOES it.
|
| 312 |
+
*/
|
| 313 |
+
async function executeAgentActions(actions: any[]): Promise<Record<string, any>> {
|
| 314 |
+
const results: Record<string, any> = {};
|
| 315 |
+
for (const action of actions) {
|
| 316 |
+
try {
|
| 317 |
+
switch (action.tool) {
|
| 318 |
+
case 'check_balance':
|
| 319 |
+
results.balance = await walletService!.getBalance();
|
| 320 |
+
break;
|
| 321 |
+
case 'get_history':
|
| 322 |
+
results.history = await walletService!.getTransactionHistory(action.params?.limit || 5);
|
| 323 |
+
break;
|
| 324 |
+
case 'get_address':
|
| 325 |
+
results.address = walletService!.getPublicKey();
|
| 326 |
+
break;
|
| 327 |
+
case 'resolve_contact':
|
| 328 |
+
results.contact = await qvacEngine!.resolveContact(action.params?.query || '');
|
| 329 |
+
break;
|
| 330 |
+
case 'search_knowledge':
|
| 331 |
+
results.knowledge = await qvacEngine!.semanticSearch(action.params?.query || '', 3);
|
| 332 |
+
break;
|
| 333 |
+
// send_sol, send_usdt, confirm_transaction are NOT auto-executed β
|
| 334 |
+
// they require explicit user confirmation from the frontend
|
| 335 |
+
}
|
| 336 |
+
} catch (e: any) {
|
| 337 |
+
results[`${action.tool}_error`] = e.message;
|
| 338 |
+
}
|
| 339 |
}
|
| 340 |
+
return results;
|
| 341 |
+
}
|
| 342 |
+
|
| 343 |
+
ipcMain.handle('ai:initialize', async () => {
|
| 344 |
+
try { await qvacEngine!.initialize(); return { success: true }; }
|
| 345 |
+
catch (error: any) { return { success: false, error: error.message }; }
|
| 346 |
});
|
| 347 |
|
| 348 |
+
/**
|
| 349 |
+
* AI Agent Chat β The LLM reasons about the user's message,
|
| 350 |
+
* selects tools, and we execute them against the real wallet.
|
| 351 |
+
* Returns the agent's response + any tool results + pipeline trace.
|
| 352 |
+
*/
|
| 353 |
+
ipcMain.handle('ai:chat', async (_, message: string) => {
|
| 354 |
try {
|
| 355 |
+
const ctx = await getWalletContext();
|
| 356 |
+
const agentResult = await qvacEngine!.chat(message, ctx);
|
| 357 |
+
|
| 358 |
+
// Execute non-destructive tool calls automatically
|
| 359 |
+
const toolResults = await executeAgentActions(
|
| 360 |
+
agentResult.actions.filter((a: any) => !['send_sol', 'send_usdt', 'confirm_transaction'].includes(a.tool))
|
| 361 |
+
);
|
| 362 |
+
|
| 363 |
+
// Auto-index this interaction for future RAG
|
| 364 |
+
await qvacEngine!.indexKnowledge(`User asked: "${message}" β AI responded about ${agentResult.actions.map((a: any) => a.tool).join(', ') || 'general help'}`);
|
| 365 |
+
|
| 366 |
+
return { success: true, ...agentResult, toolResults };
|
| 367 |
} catch (error: any) {
|
| 368 |
return { success: false, error: error.message };
|
| 369 |
}
|
| 370 |
});
|
| 371 |
|
| 372 |
+
/**
|
| 373 |
+
* Voice Agent β Full 6-module QVAC pipeline:
|
| 374 |
+
* Whisper STT β Translation β LLM Agent β Embeddings RAG β
|
| 375 |
+
* Translation back β Piper TTS
|
| 376 |
+
*/
|
| 377 |
+
ipcMain.handle('ai:processVoice', async (_, audioData: ArrayBuffer) => {
|
| 378 |
try {
|
| 379 |
+
const ctx = await getWalletContext();
|
| 380 |
+
const result = await qvacEngine!.processVoiceCommand(Buffer.from(audioData), ctx);
|
| 381 |
+
|
| 382 |
+
// Execute non-destructive agent tools
|
| 383 |
+
const toolResults = await executeAgentActions(
|
| 384 |
+
result.agentResult.actions.filter((a: any) => !['send_sol', 'send_usdt', 'confirm_transaction'].includes(a.tool))
|
| 385 |
+
);
|
| 386 |
+
|
| 387 |
+
// Auto-index voice interaction
|
| 388 |
+
await qvacEngine!.indexKnowledge(`Voice command: "${result.transcription}" β processed via QVAC pipeline`);
|
| 389 |
+
|
| 390 |
+
return { success: true, ...result, toolResults };
|
| 391 |
} catch (error: any) {
|
| 392 |
return { success: false, error: error.message };
|
| 393 |
}
|
| 394 |
});
|
| 395 |
|
| 396 |
+
/**
|
| 397 |
+
* Execute a confirmed transaction β called AFTER the user confirms
|
| 398 |
+
* a pending transaction that the AI agent proposed.
|
| 399 |
+
*/
|
| 400 |
+
ipcMain.handle('ai:executeConfirmed', async (_, { token, amount, to }: { token: string; amount: number; to: string }) => {
|
| 401 |
try {
|
| 402 |
+
// Run all security checks
|
| 403 |
+
if (!transactionGuard!.validateAddress(to)) return { success: false, error: 'Invalid address' };
|
| 404 |
+
if (!transactionGuard!.validateAmount(amount)) return { success: false, error: 'Invalid amount' };
|
| 405 |
+
|
| 406 |
+
const limitCheck = await transactionGuard!.checkTransactionLimits(amount, token);
|
| 407 |
+
if (!limitCheck.allowed) return { success: false, error: limitCheck.reason };
|
| 408 |
+
|
| 409 |
+
const wlCheck = transactionGuard!.checkWhitelist(to);
|
| 410 |
+
if (!wlCheck.allowed) return { success: false, error: wlCheck.reason };
|
| 411 |
+
|
| 412 |
+
// AI risk assessment before execution
|
| 413 |
+
const risk = await qvacEngine!.assessTransactionRisk(amount, token, to);
|
| 414 |
+
if (risk.level === 'danger') {
|
| 415 |
+
return { success: false, error: `AI blocked: ${risk.recommendation}`, risk };
|
| 416 |
+
}
|
| 417 |
+
|
| 418 |
+
// Execute
|
| 419 |
+
const sig = token === 'SOL'
|
| 420 |
+
? await walletService!.sendSOL(to, amount)
|
| 421 |
+
: await walletService!.sendUSDT(to, amount);
|
| 422 |
+
|
| 423 |
+
await transactionGuard!.recordTransaction(amount, token, to);
|
| 424 |
+
|
| 425 |
+
// Auto-index the transaction in the vector store for future RAG
|
| 426 |
+
await qvacEngine!.indexTransaction({
|
| 427 |
+
signature: sig, amount, token, to,
|
| 428 |
+
timestamp: Date.now(),
|
| 429 |
+
});
|
| 430 |
+
|
| 431 |
+
return { success: true, signature: sig, explorer: `https://solscan.io/tx/${sig}`, risk };
|
| 432 |
} catch (error: any) {
|
| 433 |
return { success: false, error: error.message };
|
| 434 |
}
|
| 435 |
});
|
| 436 |
|
| 437 |
+
/**
|
| 438 |
+
* OCR β Payment extraction pipeline
|
| 439 |
+
* Uses @qvac/ocr-onnx β @qvac/llm-llamacpp in series
|
| 440 |
+
*/
|
| 441 |
+
ipcMain.handle('ai:ocrPayment', async (_, imageData: ArrayBuffer) => {
|
| 442 |
try {
|
| 443 |
+
const result = await qvacEngine!.processDocumentToPayment(Buffer.from(imageData));
|
| 444 |
+
return { success: true, ...result };
|
| 445 |
} catch (error: any) {
|
| 446 |
return { success: false, error: error.message };
|
| 447 |
}
|
| 448 |
});
|
| 449 |
|
| 450 |
+
/**
|
| 451 |
+
* AI Risk Assessment β analyze a transaction before execution
|
| 452 |
+
*/
|
| 453 |
+
ipcMain.handle('ai:assessRisk', async (_, { amount, token, to }: { amount: number; token: string; to: string }) => {
|
| 454 |
try {
|
| 455 |
+
const risk = await qvacEngine!.assessTransactionRisk(amount, token, to);
|
| 456 |
+
return { success: true, risk };
|
| 457 |
} catch (error: any) {
|
| 458 |
return { success: false, error: error.message };
|
| 459 |
}
|
| 460 |
});
|
| 461 |
|
| 462 |
+
/**
|
| 463 |
+
* Contact resolution via semantic embeddings
|
| 464 |
+
*/
|
| 465 |
+
ipcMain.handle('ai:resolveContact', async (_, query: string) => {
|
| 466 |
try {
|
| 467 |
+
const contact = await qvacEngine!.resolveContact(query);
|
| 468 |
+
return { success: true, contact };
|
| 469 |
} catch (error: any) {
|
| 470 |
return { success: false, error: error.message };
|
| 471 |
}
|
| 472 |
});
|
| 473 |
|
| 474 |
+
ipcMain.handle('ai:addContact', async (_, contact: any) => {
|
| 475 |
try {
|
| 476 |
+
await qvacEngine!.addContact(contact);
|
| 477 |
+
return { success: true };
|
| 478 |
} catch (error: any) {
|
| 479 |
return { success: false, error: error.message };
|
| 480 |
}
|
| 481 |
});
|
| 482 |
|
| 483 |
+
ipcMain.handle('ai:getContacts', async () => {
|
| 484 |
+
return qvacEngine!.getContacts();
|
| 485 |
+
});
|
| 486 |
+
|
| 487 |
+
ipcMain.handle('ai:speak', async (_, text: string) => {
|
| 488 |
+
try { return { success: true, audio: await qvacEngine!.speak(text) }; }
|
| 489 |
+
catch (error: any) { return { success: false, error: error.message }; }
|
| 490 |
});
|
| 491 |
|
| 492 |
+
ipcMain.handle('ai:translate', async (_, text: string, from: string, to: string) => {
|
| 493 |
+
try { return { success: true, translated: await qvacEngine!.translate(text, from, to) }; }
|
| 494 |
+
catch (error: any) { return { success: false, error: error.message }; }
|
| 495 |
+
});
|
| 496 |
+
|
| 497 |
+
// Report the engine's module readiness flags to the renderer.
ipcMain.handle('ai:getStatus', async () => {
  return qvacEngine!.getStatus();
});
|
| 498 |
+
|
| 499 |
// ββ RAG / Semantic Search ββ
|
| 500 |
+
ipcMain.handle('rag:search', async (_, query: string, category?: string) => {
|
| 501 |
+
try { return { success: true, results: await qvacEngine!.semanticSearch(query, 5, category) }; }
|
| 502 |
+
catch (error: any) { return { success: false, error: error.message }; }
|
|
|
|
|
|
|
|
|
|
|
|
|
| 503 |
});
|
| 504 |
|
| 505 |
+
ipcMain.handle('rag:index', async (_, text: string, metadata: any) => {
|
| 506 |
+
try { await qvacEngine!.indexKnowledge(text, metadata); return { success: true }; }
|
| 507 |
+
catch (error: any) { return { success: false, error: error.message }; }
|
|
|
|
|
|
|
|
|
|
|
|
|
| 508 |
});
|
| 509 |
}
|
| 510 |
|
src/main/preload.ts
CHANGED
|
@@ -46,26 +46,36 @@ contextBridge.exposeInMainWorld('solvox', {
|
|
| 46 |
getAnomalies: () => ipcRenderer.invoke('security:getAnomalies'),
|
| 47 |
},
|
| 48 |
|
| 49 |
-
// ββ AI / QVAC
|
| 50 |
ai: {
|
| 51 |
initialize: () => ipcRenderer.invoke('ai:initialize'),
|
| 52 |
-
|
| 53 |
-
ipcRenderer.invoke('ai:processVoice', audioData),
|
| 54 |
chat: (message: string) => ipcRenderer.invoke('ai:chat', message),
|
| 55 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 56 |
speak: (text: string) => ipcRenderer.invoke('ai:speak', text),
|
| 57 |
translate: (text: string, from: string, to: string) =>
|
| 58 |
ipcRenderer.invoke('ai:translate', text, from, to),
|
| 59 |
-
embed: (text: string) => ipcRenderer.invoke('ai:embed', text),
|
| 60 |
-
ocr: (imageData: ArrayBuffer) => ipcRenderer.invoke('ai:ocr', imageData),
|
| 61 |
getStatus: () => ipcRenderer.invoke('ai:getStatus'),
|
| 62 |
},
|
| 63 |
|
| 64 |
-
// ββ RAG /
|
| 65 |
rag: {
|
| 66 |
-
search: (query: string) => ipcRenderer.invoke('rag:search', query),
|
| 67 |
-
|
| 68 |
-
ipcRenderer.invoke('rag:addDocument', text, metadata),
|
| 69 |
},
|
| 70 |
|
| 71 |
// ββ Event Subscriptions (main β renderer) ββ
|
|
|
|
| 46 |
getAnomalies: () => ipcRenderer.invoke('security:getAnomalies'),
|
| 47 |
},
|
| 48 |
|
| 49 |
+
// ── AI / QVAC Deep Integration ──
|
| 50 |
ai: {
|
| 51 |
initialize: () => ipcRenderer.invoke('ai:initialize'),
|
| 52 |
+
// Agent chat — LLM reasons about tools, executes non-destructive ops
|
|
|
|
| 53 |
chat: (message: string) => ipcRenderer.invoke('ai:chat', message),
|
| 54 |
+
// Full 6-module voice pipeline: STT → Translation → LLM Agent → RAG → Translation → TTS
|
| 55 |
+
processVoice: (audioData: ArrayBuffer) => ipcRenderer.invoke('ai:processVoice', audioData),
|
| 56 |
+
// Execute a transaction the agent proposed (after user confirmation)
|
| 57 |
+
executeConfirmed: (tx: { token: string; amount: number; to: string }) =>
|
| 58 |
+
ipcRenderer.invoke('ai:executeConfirmed', tx),
|
| 59 |
+
// OCR → LLM payment extraction pipeline
|
| 60 |
+
ocrPayment: (imageData: ArrayBuffer) => ipcRenderer.invoke('ai:ocrPayment', imageData),
|
| 61 |
+
// AI risk assessment for a pending transaction
|
| 62 |
+
assessRisk: (tx: { amount: number; token: string; to: string }) =>
|
| 63 |
+
ipcRenderer.invoke('ai:assessRisk', tx),
|
| 64 |
+
// Semantic contact resolution
|
| 65 |
+
resolveContact: (query: string) => ipcRenderer.invoke('ai:resolveContact', query),
|
| 66 |
+
addContact: (contact: any) => ipcRenderer.invoke('ai:addContact', contact),
|
| 67 |
+
getContacts: () => ipcRenderer.invoke('ai:getContacts'),
|
| 68 |
+
// Direct QVAC module access
|
| 69 |
speak: (text: string) => ipcRenderer.invoke('ai:speak', text),
|
| 70 |
translate: (text: string, from: string, to: string) =>
|
| 71 |
ipcRenderer.invoke('ai:translate', text, from, to),
|
|
|
|
|
|
|
| 72 |
getStatus: () => ipcRenderer.invoke('ai:getStatus'),
|
| 73 |
},
|
| 74 |
|
| 75 |
+
// ── RAG / Semantic Search ──
|
| 76 |
rag: {
|
| 77 |
+
search: (query: string, category?: string) => ipcRenderer.invoke('rag:search', query, category),
|
| 78 |
+
index: (text: string, metadata: any) => ipcRenderer.invoke('rag:index', text, metadata),
|
|
|
|
| 79 |
},
|
| 80 |
|
| 81 |
// ββ Event Subscriptions (main β renderer) ββ
|