darwinkernelpanic committed on
Commit
00115f9
·
verified ·
1 Parent(s): e79434e

Upload example.js with huggingface_hub

Browse files
Files changed (1) hide show
  1. example.js +47 -0
example.js ADDED
@@ -0,0 +1,47 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ // AI Detector Example - JavaScript/Node.js
2
+ // Install: npm install @xenova/transformers onnxruntime-node
3
+
4
+ const { AutoTokenizer } = require('@xenova/transformers');
5
+ const ort = require('onnxruntime-node');
6
+
7
+ async function detectAI(text) {
8
+ // Tokenize
9
+ const tokenizer = await AutoTokenizer.from_pretrained('darwinkernelpanic/ai-detector-pgx');
10
+ const encoded = await tokenizer(text, {
11
+ padding: true,
12
+ truncation: true,
13
+ max_length: 512,
14
+ return_tensors: 'pt'
15
+ });
16
+
17
+ // Load ONNX model
18
+ const session = await ort.InferenceSession.create('./model.onnx');
19
+
20
+ // Prepare inputs
21
+ const inputIds = new ort.Tensor('int64', encoded.input_ids.data, encoded.input_ids.dims);
22
+ const attentionMask = new ort.Tensor('int64', encoded.attention_mask.data, encoded.attention_mask.dims);
23
+
24
+ // Run inference
25
+ const results = await session.run({
26
+ input_ids: inputIds,
27
+ attention_mask: attentionMask
28
+ });
29
+
30
+ // Softmax
31
+ const logits = results.logits.data;
32
+ const exp0 = Math.exp(logits[0]);
33
+ const exp1 = Math.exp(logits[1]);
34
+ const aiProb = exp1 / (exp0 + exp1);
35
+
36
+ return {
37
+ ai_probability: aiProb,
38
+ is_ai: aiProb > 0.5,
39
+ confidence: Math.abs(aiProb - 0.5) * 2
40
+ };
41
+ }
42
+
43
+ // Run example
44
+ detectAI("The mitochondria is the powerhouse of the cell...")
45
+ .then(r => console.log('AI Probability:', (r.ai_probability * 100).toFixed(1) + '%'));
46
+
47
+ module.exports = { detectAI };