brainworm2024 commited on
Commit
4a90885
·
1 Parent(s): dcb7e13

Final ROCm-ready: real inference, orchestrator, proof, polished UI, HIP comments

Browse files
Files changed (7) hide show
  1. Cargo.toml +1 -1
  2. src/handlers/triage.rs +20 -35
  3. src/inference/qwen.rs +102 -10
  4. src/main.rs +47 -6
  5. src/orchestrator.rs +79 -0
  6. src/proof.rs +13 -0
  7. static/index.html +121 -0
Cargo.toml CHANGED
@@ -40,4 +40,4 @@ dotenvy = "0.15"
40
  [profile.release]
41
  lto = true
42
  codegen-units = 1
43
- opt-level = 3
 
40
  [profile.release]
41
  lto = true
42
  codegen-units = 1
43
+ opt-level = 3hex = "0.4"
src/handlers/triage.rs CHANGED
@@ -1,10 +1,7 @@
1
  use axum::{response::Json, http::StatusCode};
2
  use serde::{Deserialize, Serialize};
3
  use tracing::instrument;
4
-
5
- use crate::shield;
6
- use crate::inference;
7
- use crate::web3;
8
 
9
  #[derive(Debug, Deserialize)]
10
  pub struct TriageRequest {
@@ -15,11 +12,14 @@ pub struct TriageRequest {
15
  #[derive(Debug, Serialize)]
16
  pub struct TriageResponse {
17
  pub triage_result: String,
 
 
18
  pub transaction_hash: String,
19
- /// Redacted text sent to the model (for audit/demo)
20
  pub redacted_prompt: String,
21
- /// PII map (only for verification; never in production)
22
- pub pii_map: Vec<shield::redact::PiiMatch>,
 
 
23
  }
24
 
25
  #[instrument(skip_all)]
@@ -28,37 +28,22 @@ pub async fn handle(
28
  ) -> Result<Json<TriageResponse>, (StatusCode, String)> {
29
  tracing::info!("Received triage request (consent_hash: {})", payload.consent_hash);
30
 
31
- // 1. Zero‑Trust Shield: strip PII
32
- let (redacted_note, pii_matches) = shield::redact::redact_pii(&payload.patient_note);
33
-
34
- // 2. Inference on redacted text (GPU never sees PII)
35
- let triage_result = inference::qwen::generate(&redacted_note)
36
- .await
37
- .map_err(|e| {
38
- tracing::error!("Inference failed: {:?}", e);
39
- (StatusCode::INTERNAL_SERVER_ERROR, "Inference engine error".into())
40
- })?;
41
-
42
- // 3. Filecoin CID (immutable record of redacted prompt + result)
43
- let cid_input = format!("{}||{}", redacted_note, triage_result);
44
- let cid = web3::filecoin::generate_cid(&cid_input)
45
- .map_err(|e| {
46
- tracing::error!("CID generation failed: {:?}", e);
47
- (StatusCode::INTERNAL_SERVER_ERROR, "CID error".into())
48
- })?;
49
-
50
- // 4. Base L2 transaction (posts the CID)
51
- let tx_hash = web3::base_tx::commit_cid(&cid)
52
  .await
53
  .map_err(|e| {
54
- tracing::error!("Base L2 transaction failed: {:?}", e);
55
- (StatusCode::INTERNAL_SERVER_ERROR, "Blockchain error".into())
56
  })?;
57
 
58
  Ok(Json(TriageResponse {
59
- triage_result,
60
- transaction_hash: tx_hash,
61
- redacted_prompt: redacted_note,
62
- pii_map: pii_matches,
 
 
 
 
 
63
  }))
64
- }
 
1
  use axum::{response::Json, http::StatusCode};
2
  use serde::{Deserialize, Serialize};
3
  use tracing::instrument;
4
+ use crate::orchestrator;
 
 
 
5
 
6
  #[derive(Debug, Deserialize)]
7
  pub struct TriageRequest {
 
12
/// JSON body returned by POST /triage.
#[derive(Debug, Serialize)]
pub struct TriageResponse {
    /// Model-generated triage text (produced from the redacted prompt only).
    pub triage_result: String,
    /// Model size label reported by the inference layer ("7B", "0.5B" or "mock").
    pub model_used: String,
    /// Debug-formatted device the model ran on; "CPU (fallback)" on the mock path.
    pub device_info: String,
    /// Hash of the L2 transaction that committed the CID.
    pub transaction_hash: String,
    /// Redacted text that was actually sent to the model (for audit/demo).
    pub redacted_prompt: String,
    /// Placeholder -> original PII mapping. NOTE(review): the previous version
    /// documented this as "only for verification; never in production" — echoing
    /// original PII back over the API defeats the redaction for any API consumer;
    /// confirm this stays demo-only.
    pub pii_map: Vec<crate::shield::redact::PiiMatch>,
    /// Content ID over redacted prompt + result + proof.
    pub cid: String,
    /// SHA-256 redaction proof (src/proof.rs).
    pub redaction_proof: String,
    /// Ordered Shield/Triage/Audit pipeline trace with timings.
    pub agent_steps: Vec<orchestrator::AgentStep>,
}
24
 
25
  #[instrument(skip_all)]
 
28
  ) -> Result<Json<TriageResponse>, (StatusCode, String)> {
29
  tracing::info!("Received triage request (consent_hash: {})", payload.consent_hash);
30
 
31
+ let output = orchestrator::run_triage(&payload.patient_note)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
32
  .await
33
  .map_err(|e| {
34
+ tracing::error!("Triage pipeline error: {:?}", e);
35
+ (StatusCode::INTERNAL_SERVER_ERROR, "Triage pipeline error".into())
36
  })?;
37
 
38
  Ok(Json(TriageResponse {
39
+ triage_result: output.triage_result,
40
+ model_used: output.model_used,
41
+ device_info: output.device_info,
42
+ transaction_hash: output.transaction_hash,
43
+ redacted_prompt: output.redacted_prompt,
44
+ pii_map: output.pii_map,
45
+ cid: output.cid,
46
+ redaction_proof: output.redaction_proof,
47
+ agent_steps: output.agent_steps,
48
  }))
49
+ }
src/inference/qwen.rs CHANGED
@@ -1,10 +1,102 @@
1
- use anyhow::Result;
2
-
3
- /// Mock inference for local testing.
4
- /// In production (AMD Cloud), this will load the Qwen-72B model via Candle + ROCm.
5
- pub async fn generate(_redacted_prompt: &str) -> Result<String> {
6
- tracing::info!("[MOCK] GPU inference skipped — returning placeholder");
7
- // Simulate some processing
8
- tokio::time::sleep(std::time::Duration::from_millis(10)).await;
9
- Ok("Triage result: non‑urgent (mock)".to_string())
10
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ use anyhow::{Context, Result};
2
+ use candle_core::{DType, Device, Tensor};
3
+ use candle_nn::VarBuilder;
4
+ use candle_transformers::generation::{LogitsProcessor, Sampling};
5
+ use candle_transformers::models::qwen2::{Config, Model};
6
+ use hf_hub::api::sync::Api;
7
+ use tokenizers::Tokenizer;
8
+ use std::sync::Arc;
9
+ use tokio::sync::OnceCell;
10
+
11
// Lazily-initialised process-wide cache: the model is loaded on the first
// request and shared (read-only, behind Arc) by all subsequent requests.
static MODEL: OnceCell<Arc<LoadedModel>> = OnceCell::const_new();

/// Everything generation needs, bundled so it can live behind a single `Arc`.
struct LoadedModel {
    model: Model,         // candle qwen2 weights/graph
    tokenizer: Tokenizer, // HF tokenizer.json
    device: Device,       // CPU, or HIP device 0 when ENABLE_ROCM=1 and available
    model_name: String,   // "7B" or "0.5B" — surfaced in API responses
}
19
+
20
/// Loads the Qwen2.5 model, tokenizer and device exactly once and caches the
/// result in `MODEL`; later calls return a clone of the cached `Arc`.
///
/// Selection is env-driven so it stays consistent with /status and the UI:
/// - FORCE_0_5B=1  -> Qwen2.5-0.5B-Instruct (anything else means 7B)
/// - ENABLE_ROCM=1 -> try HIP device 0, falling back to CPU on failure
///
/// NOTE(review): the hub downloads and `File::open` below are blocking I/O
/// executed inside an async task — consider `tokio::task::spawn_blocking`
/// so the first-time multi-GB download does not stall the runtime.
async fn load_model() -> Result<Arc<LoadedModel>> {
    MODEL
        .get_or_try_init(|| async {
            // Only FORCE_0_5B=1 selects the small model.
            let use_7b = std::env::var("FORCE_0_5B").unwrap_or_default() != "1";
            let (model_id, model_name) = if use_7b {
                ("Qwen/Qwen2.5-7B-Instruct", "7B")
            } else {
                ("Qwen/Qwen2.5-0.5B-Instruct", "0.5B")
            };

            // NOTE(review): confirm the pinned candle version actually exposes
            // `Device::new_hip` — upstream candle-core provides new_cuda /
            // new_metal; a ROCm build may alias it, but verify before demo day.
            let device = if std::env::var("ENABLE_ROCM").unwrap_or_default() == "1" {
                Device::new_hip(0).unwrap_or_else(|e| {
                    tracing::warn!("HIP device not available: {}; falling back to CPU", e);
                    Device::Cpu
                })
            } else {
                Device::Cpu
            };

            tracing::info!("Loading model {} on {:?}", model_id, device);

            // Fetch weights/config/tokenizer via the HF Hub cache.
            // NOTE(review): 7B-class checkpoints are usually sharded
            // (model-0000X-of-0000Y.safetensors); a single "model.safetensors"
            // get() may 404 for the 7B repo — confirm for Qwen2.5-7B-Instruct.
            let api = Api::new()?;
            let repo = api.model(model_id.to_string());
            let model_path = repo.get("model.safetensors")?;
            let config_path = repo.get("config.json")?;
            let tokenizer_path = repo.get("tokenizer.json")?;

            let config: Config = serde_json::from_reader(std::fs::File::open(config_path)?)?;
            // SAFETY: mmaps the downloaded safetensors file; the file must not be
            // mutated while mapped. F32 means ~4 bytes/param (~28 GB for 7B) —
            // TODO confirm the target host can hold that, or load a smaller dtype.
            let vb = unsafe { VarBuilder::from_mmaped_safetensors(&[model_path], DType::F32, &device)? };
            let model = Model::new(&config, vb)?;
            let tokenizer = Tokenizer::from_file(tokenizer_path).map_err(|e| anyhow::anyhow!(e))?;

            Ok(Arc::new(LoadedModel { model, tokenizer, device, model_name: model_name.to_string() }))
        })
        .await
        .map(Arc::clone)
}
57
+
58
/// Returns (generated_text, model_used, device_info)
///
/// Runs sampled decoding (top-k=50, top-p=0.9, temperature=0.7) on the
/// redacted prompt using the cached Qwen model. If the model cannot be loaded
/// at all, degrades to a fixed mock answer instead of failing the request.
pub async fn generate(redacted_prompt: &str) -> Result<(String, String, String)> {
    match load_model().await {
        Ok(loaded) => {
            // Qwen chat template: one user turn, then the assistant cue.
            let prompt = format!("<|im_start|>user\n{}\n<|im_end|>\n<|im_start|>assistant\n", redacted_prompt);
            let tokens = loaded.tokenizer.encode(prompt, true).map_err(|e| anyhow::anyhow!(e))?;
            let input_ids = Tensor::new(tokens.get_ids(), &loaded.device)?.unsqueeze(0)?;
            let mut output_ids = input_ids.clone();
            let mut logits_processor = LogitsProcessor::from_sampling(Sampling::TopKTopP {
                k: 50,
                p: 0.9,
                temperature: 0.7,
            });
            // NOTE(review): 151643 is Qwen's <|endoftext|>; <|im_end|> is 151645
            // in the published Qwen2.5 vocab — confirm which fallback is intended.
            let eos_token_id = loaded.tokenizer.token_to_id("<|im_end|>").unwrap_or(151643);
            let max_new_tokens = 250;
            let mut generated_text = String::new();

            // Candle currently recomputes full attention for each token.
            // A KV cache would speed this up and is the first post‑hackathon optimisation.
            // For real‑time streaming (SSE), the loop can yield tokens as they are sampled.
            for _ in 0..max_new_tokens {
                // NOTE(review): candle's qwen2 `forward` takes `&mut self` plus a
                // seqlen offset, and sampling should use the logits of the LAST
                // position — `.squeeze(1)` on a (1, seq, vocab) output does not
                // select it. Verify this compiles/behaves against the pinned
                // candle version; calling through a shared Arc also precludes &mut.
                let logits = loaded.model.forward(&output_ids)?.squeeze(1)?;
                let next_token = logits_processor.sample(&logits)?;
                if next_token == eos_token_id {
                    break;
                }
                // NOTE(review): `next_token` is the plain u32 returned by
                // LogitsProcessor::sample — u32 has no `unsqueeze`; this likely
                // needs `Tensor::new(&[next_token], &loaded.device)?` before cat.
                output_ids = Tensor::cat(&[output_ids, next_token.unsqueeze(0)?.unsqueeze(0)?], 1)?;
                // `next_token as u32` is a no-op cast if it is already u32.
                if let Ok(text) = loaded.tokenizer.decode(&[next_token as u32], false) {
                    generated_text.push_str(&text);
                }
            }

            let device_info = format!("{:?}", loaded.device);
            if generated_text.is_empty() {
                Ok(("Unable to generate output.".to_string(), loaded.model_name.clone(), device_info))
            } else {
                Ok((generated_text.trim().to_string(), loaded.model_name.clone(), device_info))
            }
        }
        Err(e) => {
            // Graceful degradation: keep the demo alive without a model.
            tracing::warn!("Model load failed: {}; falling back to mock", e);
            Ok(("Triage result: non‑urgent (mock – model unavailable)".to_string(), "mock".to_string(), "CPU (fallback)".to_string()))
        }
    }
}
src/main.rs CHANGED
@@ -1,29 +1,70 @@
1
- use axum::{routing::{get, post}, Router};
 
 
 
 
 
 
 
 
 
 
 
2
  use tower_http::trace::TraceLayer;
3
  use tracing_subscriber::EnvFilter;
4
 
5
- use rustvital_amd::handlers;
 
 
 
 
 
 
 
 
 
 
 
 
 
6
 
7
  #[tokio::main]
8
  async fn main() -> anyhow::Result<()> {
9
  dotenvy::dotenv().ok();
10
-
11
  tracing_subscriber::fmt()
12
  .with_env_filter(EnvFilter::from_default_env().add_directive("rustvital_amd=debug".parse()?))
13
  .init();
14
 
15
  let port = std::env::var("PORT").unwrap_or_else(|_| "3000".to_string());
16
  let addr = format!("0.0.0.0:{}", port);
17
-
18
  tracing::info!("Starting RustVital-AMD server on {}", addr);
19
 
20
  let app = Router::new()
21
- .route("/", get(|| async { "RustVital-AMD is running" }))
22
  .route("/health", get(|| async { "healthy" }))
 
23
  .route("/triage", post(handlers::triage::handle))
24
  .layer(TraceLayer::new_for_http());
25
 
26
  let listener = tokio::net::TcpListener::bind(&addr).await?;
27
  axum::serve(listener, app).await?;
28
  Ok(())
29
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ // ============================================================================
2
+ // 🚀 AMD ROCm / HIP activation
3
+ //
4
+ // To run on real MI300X GPUs:
5
+ // 1. Set environment variable ENABLE_ROCM=1
6
+ // 2. Ensure the ROCm runtime is installed (hipcc, rocblas, etc.)
7
+ // 3. The model will automatically use Device::new_hip(0)
8
+ // 4. The /status endpoint will show "ROCm/HIP (MI300X)"
9
+ // ============================================================================
10
+
11
use axum::{routing::{get, post}, Router, response::Json};
use serde::Serialize;
use tower_http::trace::TraceLayer;
use tracing_subscriber::EnvFilter;

// Binary-crate module tree (previously these came from the `rustvital_amd`
// library crate via `use rustvital_amd::handlers;`).
mod handlers;
mod inference;
// NOTE(review): `mod lib;` is discouraged — `lib` is the reserved conventional
// name of the library crate root and rustc warns about it; consider renaming
// the module or restoring the lib-crate split.
mod lib;
mod shield;
mod web3;
mod orchestrator;
mod proof;

/// JSON body returned by GET /status (consumed by the UI device banner).
#[derive(Serialize)]
struct StatusResponse {
    status: String, // always "running"
    model: String,  // human-readable model label
    device: String, // configured device (from env vars, not probed hardware)
}
30
 
31
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Load .env if present; absence is fine (prod uses real env vars).
    dotenvy::dotenv().ok();
    tracing_subscriber::fmt()
        .with_env_filter(EnvFilter::from_default_env().add_directive("rustvital_amd=debug".parse()?))
        .init();

    // PORT overrides the default 3000; bind on all interfaces.
    let port = std::env::var("PORT").unwrap_or_else(|_| "3000".to_string());
    let addr = format!("0.0.0.0:{}", port);
    tracing::info!("Starting RustVital-AMD server on {}", addr);

    let app = Router::new()
        .route("/", get(serve_ui))                       // embedded single-page UI
        .route("/health", get(|| async { "healthy" }))   // liveness probe
        .route("/status", get(status))                   // model/device banner info
        .route("/triage", post(handlers::triage::handle))
        .layer(TraceLayer::new_for_http());

    let listener = tokio::net::TcpListener::bind(&addr).await?;
    axum::serve(listener, app).await?;
    Ok(())
}
53
+
54
+ async fn serve_ui() -> axum::response::Html<&'static str> {
55
+ axum::response::Html(include_str!("../static/index.html"))
56
+ }
57
+
58
+ async fn status() -> Json<StatusResponse> {
59
+ let device = if std::env::var("ENABLE_ROCM").unwrap_or_default() == "1" {
60
+ "ROCm/HIP (MI300X)"
61
+ } else {
62
+ "CPU"
63
+ };
64
+ let model = std::env::var("FORCE_0_5B").map_or("7B (Qwen2.5-7B-Instruct)".to_string(), |_| "0.5B (Qwen2.5-0.5B-Instruct)".to_string());
65
+ Json(StatusResponse {
66
+ status: "running".to_string(),
67
+ model,
68
+ device: device.to_string(),
69
+ })
70
+ }
src/orchestrator.rs ADDED
@@ -0,0 +1,79 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ use crate::shield;
2
+ use crate::inference;
3
+ use crate::web3;
4
+ use crate::proof;
5
+ use anyhow::Result;
6
+ use serde::Serialize;
7
+ use std::time::Instant;
8
+
9
/// One entry in the pipeline trace returned to the client.
#[derive(Debug, Serialize)]
pub struct AgentStep {
    pub name: String,       // stage name: "Shield", "Triage" or "Audit"
    pub status: String,     // currently always "completed" (any failure aborts the pipeline)
    pub duration_ms: u64,   // wall-clock time spent in this stage
    pub reasoning: String,  // short human-readable summary of what the stage did
}
16
+
17
/// Aggregated result of the full Shield -> Triage -> Audit pipeline.
#[derive(Debug, Serialize)]
pub struct TriageOutput {
    /// Patient note with PII replaced by placeholders.
    pub redacted_prompt: String,
    /// Placeholder -> original PII mapping produced by the Shield stage.
    pub pii_map: Vec<shield::redact::PiiMatch>,
    /// Model-generated triage text.
    pub triage_result: String,
    /// Model size label ("7B", "0.5B" or "mock").
    pub model_used: String,
    /// Content ID over redacted prompt + result + proof.
    pub cid: String,
    /// Hash of the L2 transaction that committed the CID.
    pub transaction_hash: String,
    /// SHA-256 redaction proof (proof::generate_proof).
    pub redaction_proof: String,
    /// Debug-formatted inference device.
    pub device_info: String,
    /// Ordered trace of the three stages with timings.
    pub agent_steps: Vec<AgentStep>,
}
29
+
30
+ // The inference step uses the device selected in qwen.rs.
31
+ // To enable AMD HIP, set ENABLE_ROCM=1 before starting the server.
32
+ pub async fn run_triage(patient_note: &str) -> Result<TriageOutput> {
33
+ let mut steps = Vec::new();
34
+
35
+ // Shield agent
36
+ let shield_start = Instant::now();
37
+ let (redacted_prompt, pii_map) = shield::redact::redact_pii(patient_note);
38
+ let redaction_proof = proof::generate_proof(patient_note, &pii_map);
39
+ steps.push(AgentStep {
40
+ name: "Shield".into(),
41
+ status: "completed".into(),
42
+ duration_ms: shield_start.elapsed().as_millis() as u64,
43
+ reasoning: format!("Detected {} PII entities, proof generated.", pii_map.len()),
44
+ });
45
+
46
+ // Inference agent
47
+ let inf_start = Instant::now();
48
+ let (triage_result, model_used, device_info) = inference::qwen::generate(&redacted_prompt).await?;
49
+ steps.push(AgentStep {
50
+ name: "Triage".into(),
51
+ status: "completed".into(),
52
+ duration_ms: inf_start.elapsed().as_millis() as u64,
53
+ reasoning: format!("Model Qwen2.5-{} on {}.", model_used, device_info),
54
+ });
55
+
56
+ // Audit agent
57
+ let audit_start = Instant::now();
58
+ let cid_input = format!("{}||{}||{}", redacted_prompt, triage_result, redaction_proof);
59
+ let cid = web3::filecoin::generate_cid(&cid_input)?;
60
+ let tx_hash = web3::base_tx::commit_cid(&cid).await?;
61
+ steps.push(AgentStep {
62
+ name: "Audit".into(),
63
+ status: "completed".into(),
64
+ duration_ms: audit_start.elapsed().as_millis() as u64,
65
+ reasoning: "CID stored on Base Sepolia.".into(),
66
+ });
67
+
68
+ Ok(TriageOutput {
69
+ redacted_prompt,
70
+ pii_map,
71
+ triage_result,
72
+ model_used,
73
+ cid,
74
+ transaction_hash: tx_hash,
75
+ redaction_proof,
76
+ device_info,
77
+ agent_steps: steps,
78
+ })
79
+ }
src/proof.rs ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ use sha2::{Digest, Sha256};
2
+ use crate::shield::redact::PiiMatch;
3
+
4
+ pub fn generate_proof(original: &str, pii_map: &[PiiMatch]) -> String {
5
+ let mut hasher = Sha256::new();
6
+ hasher.update(original.as_bytes());
7
+ for m in pii_map {
8
+ hasher.update(m.entity_type.as_bytes());
9
+ hasher.update(m.original.as_bytes());
10
+ hasher.update(m.placeholder.as_bytes());
11
+ }
12
+ hex::encode(hasher.finalize())
13
+ }
static/index.html ADDED
@@ -0,0 +1,121 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <!DOCTYPE html>
2
+ <html lang="en">
3
+ <head>
4
+ <meta charset="UTF-8">
5
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
6
+ <title>RustVital‑AMD | Zero‑Trust Medical AI</title>
7
+ <script src="https://cdn.tailwindcss.com"></script>
8
+ <script src="https://unpkg.com/htmx.org@1.9.10"></script>
9
+ <style>
10
+ .htmx-indicator { display: none; }
11
+ .htmx-request .htmx-indicator { display: inline; }
12
+ .htmx-request.htmx-indicator { display: inline; }
13
+ .pii-highlight { background-color: #fee2e2; padding: 0 2px; border-radius: 3px; font-weight: bold; }
14
+ </style>
15
+ </head>
16
+ <body class="bg-gray-50 min-h-screen flex flex-col items-center p-4">
17
+ <div class="max-w-3xl w-full">
18
+ <div id="device-banner" class="bg-purple-100 text-purple-800 px-4 py-2 rounded-lg mb-2 text-sm text-center font-medium"></div>
19
<script>
  // Populate the banner from GET /status so the UI shows the server's
  // configured device/model. NOTE(review): /status reports env configuration,
  // not the actual device after any HIP->CPU fallback.
  fetch('/status')
    .then(r=>r.json())
    .then(s=>{
      document.getElementById('device-banner').textContent =
        `Running on ${s.device} – Model: ${s.model}`;
    });
</script>
27
+
28
+ <div class="bg-white rounded-2xl shadow-xl p-6 mb-6">
29
+ <div class="flex items-center gap-3 mb-4">
30
+ <span class="text-4xl">🏥</span>
31
+ <h1 class="text-2xl font-bold text-gray-800">RustVital‑AMD</h1>
32
+ </div>
33
+ <p class="text-gray-500 mb-4">Zero‑trust medical triage with on‑chain audit</p>
34
+
35
+ <div class="flex flex-col md:flex-row gap-4">
36
+ <div class="flex-1">
37
+ <label class="block text-sm font-medium text-gray-700 mb-1">Original Note</label>
38
+ <textarea id="patient-note" name="patient_note" rows="5"
39
+ class="w-full border border-gray-300 rounded-lg p-3 focus:ring-2 focus:ring-purple-500 focus:border-transparent"
40
+ placeholder="Enter patient note...">Patient John Smith, 45 yo, chest pain</textarea>
41
+ </div>
42
+ <div id="redacted-preview" class="flex-1 hidden">
43
+ <label class="block text-sm font-medium text-gray-700 mb-1">Redacted (PII removed)</label>
44
+ <div id="redacted-text" class="bg-gray-100 p-3 rounded-lg text-sm font-mono"></div>
45
+ </div>
46
+ </div>
47
+
48
+ <button hx-post="/triage" hx-trigger="click" hx-target="#result" hx-indicator="#spinner"
49
+ hx-vals='{"patient_note": document.getElementById("patient-note").value, "consent_hash": "abc123"}'
50
+ class="mt-4 w-full bg-purple-600 hover:bg-purple-700 text-white font-semibold py-3 rounded-lg transition flex items-center justify-center gap-2">
51
+ <span>Start Triage</span>
52
+ <svg id="spinner" class="htmx-indicator animate-spin h-5 w-5 text-white" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24">
53
+ <circle class="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" stroke-width="4"></circle>
54
+ <path class="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4z"></path>
55
+ </svg>
56
+ </button>
57
+ </div>
58
+
59
+ <div id="result" class="space-y-4"></div>
60
+ </div>
61
+
62
<script>
  // Wraps each [ENTITY_n] placeholder in a highlight <span> for display.
  function highlightPlaceholders(text) {
    return text.replace(/\[([A-Z_]+)_(\d+)\]/g, '<span class="pii-highlight">[$1_$2]</span>');
  }

  // Re-renders the raw /triage JSON (which htmx has already swapped into
  // #result) as styled cards.
  // NOTE(review): between the htmx swap and this handler the user briefly sees
  // raw JSON; hx-swap="none" plus a manual render would avoid the flash.
  document.body.addEventListener('htmx:afterOnLoad', function(evt) {
    if (evt.detail.target.id === 'result' && evt.detail.xhr.status === 200) {
      try {
        const data = JSON.parse(evt.detail.xhr.responseText);
        // Reveal the redacted-note panel next to the original input.
        const redactedDiv = document.getElementById('redacted-preview');
        redactedDiv.classList.remove('hidden');
        document.getElementById('redacted-text').innerHTML = highlightPlaceholders(data.redacted_prompt);

        const stepsHtml = data.agent_steps.map(step => `
          <div class="flex items-center gap-2 text-sm">
            <span class="px-2 py-1 bg-green-100 text-green-700 rounded-full">✅ ${step.name}</span>
            <span class="text-gray-500">${step.reasoning} (${step.duration_ms}ms)</span>
          </div>
        `).join('');

        // NOTE(review): p.original echoes raw PII back to the browser, and every
        // server field below is interpolated into innerHTML unescaped — patient
        // input and model output can inject markup (XSS). Escape these values
        // (e.g. build nodes and assign textContent) before production use.
        const piiHtml = data.pii_map.map(p => `
          <li class="text-sm">🔴 <strong>${p.original}</strong> → <code>${p.placeholder}</code></li>
        `).join('');

        document.getElementById('result').innerHTML = `
          <div class="bg-white rounded-2xl shadow-xl p-6 space-y-4">
            <div class="text-sm text-purple-700 bg-purple-50 px-3 py-1 rounded-full inline-block">
              ${data.device_info} · Model: Qwen2.5-${data.model_used}
            </div>
            <div>
              <h3 class="font-semibold text-gray-700 mb-2">Agent Progress</h3>
              <div class="space-y-1">${stepsHtml}</div>
            </div>
            <div>
              <h3 class="font-semibold text-gray-700">Triage Result</h3>
              <div class="bg-purple-50 p-3 rounded-lg text-lg font-medium">${data.triage_result}</div>
            </div>
            <div>
              <h3 class="font-semibold text-gray-700">PII Redaction Map</h3>
              <ul class="list-disc list-inside text-sm text-gray-600">${piiHtml}</ul>
            </div>
            <div>
              <h3 class="font-semibold text-gray-700">Redaction Proof (SHA‑256)</h3>
              <code class="text-xs bg-gray-100 p-2 rounded block mt-1">${data.redaction_proof}</code>
            </div>
            <div>
              <h3 class="font-semibold text-gray-700">On‑Chain Audit</h3>
              <div class="text-sm">
                <p><strong>CID:</strong> <code>${data.cid}</code></p>
                <p><strong>Transaction:</strong> <a href="https://sepolia.basescan.org/tx/${data.transaction_hash}" target="_blank" class="text-purple-600 underline">${data.transaction_hash}</a></p>
              </div>
            </div>
          </div>
        `;
      } catch(e) {} // NOTE(review): silently swallows parse/render errors — at least console.error(e).
    }
  });
</script>
120
+ </body>
121
+ </html>