ameyjoshi8198 commited on
Commit
e5cb8c8
·
1 Parent(s): 57b3c00
Files changed (2) hide show
  1. app.py +42 -0
  2. requirements.txt +3 -0
app.py ADDED
@@ -0,0 +1,42 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os
import sqlite3

import gradio as gr
from huggingface_hub import InferenceClient
4
+
5
# Path of the SQLite database holding the (pre-summarized) security logs.
DB_PATH = "logs.db"

# Read the Hugging Face API token from the environment instead of hard-coding
# it: a token committed in source is a credential leak (and HF's secret
# scanner revokes leaked tokens). On Spaces, set HF_TOKEN as a repo secret.
client = InferenceClient(token=os.environ.get("HF_TOKEN"))
7
+
8
def retrieve_evidence(question, db_path=None, limit=10):
    """Fetch candidate evidence rows for *question* from the log database.

    Placeholder routing logic: *question* is currently ignored and the first
    *limit* rows of the summary table are returned verbatim — swap in real
    retrieval/ranking here.

    Args:
        question: The user's question (unused by the current lookup).
        db_path: SQLite database file; defaults to the module-level DB_PATH.
        limit: Maximum number of rows to fetch (was a hard-coded LIMIT 10).

    Returns:
        str: the fetched rows rendered via ``str()`` for prompt insertion.
    """
    if db_path is None:
        db_path = DB_PATH
    conn = sqlite3.connect(db_path)
    try:
        # Parameterized LIMIT; try/finally guarantees the connection is
        # closed even if the query raises (the original leaked it on error).
        rows = conn.execute(
            "SELECT * FROM your_summary_table LIMIT ?", (limit,)
        ).fetchall()
    finally:
        conn.close()
    return str(rows)
16
+
17
def answer_question(question):
    """Generate an evidence-grounded answer for a user question.

    Pulls rows from the log database, embeds them in an analyst prompt,
    and asks the hosted instruct model to answer using only that evidence.

    Args:
        question: Free-text question about the logs.

    Returns:
        The model's generated answer text.
    """
    evidence = retrieve_evidence(question)

    # Assemble the grounding prompt line by line; the blank entry keeps a
    # separating empty line between evidence and instruction.
    prompt = "\n".join(
        [
            "You are a security log analyst.",
            "Here is the relevant evidence from the database:",
            evidence,
            "",
            "Answer this question based only on the evidence above:",
            question,
        ]
    )

    # Free hosted model; 512 new tokens is ample for a short analysis.
    return client.text_generation(
        prompt,
        model="mistralai/Mistral-7B-Instruct-v0.3",
        max_new_tokens=512,
    )
33
+
34
# Gradio UI: one text box in, one text box out, wired to the Q&A pipeline.
question_box = gr.Textbox(label="Ask a question about the logs")
answer_box = gr.Textbox(label="Answer")

demo = gr.Interface(
    fn=answer_question,
    inputs=question_box,
    outputs=answer_box,
    title="Security Log Analyzer",
    description="Ask questions about the open source log dataset.",
)

demo.launch()
requirements.txt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ gradio
2
+ huggingface_hub
3
+ # sqlite3 ships with Python's standard library; the PyPI "sqlite3" package is unrelated and breaks installs