from langchain_huggingface import HuggingFaceEndpoint,ChatHuggingFace
from langchain_core.messages import HumanMessage,SystemMessage
import os
import pandas as pd
from agent.agent_graph.graph import compiled_graph
from agent.rag.rag import rag_text_chooser
import sys
import os
from agent.agent_graph.StateTasks import Available_Tasks
from agent.tools.PDF import PDF_generator_Node
from agent.tools.email import EMAIL_sender_Node
from agent.agent_graph.Graph_Nodes import get_llm_answer
from agent.llm.prompts import NODES_Prompts
import dotenv
# NOTE(review): this path fix-up runs *after* the `agent.*` imports above — if the
# file is executed from a cwd outside the project root, those imports have already
# failed by this point. Consider moving it before them — TODO confirm intended entry point.
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')))
# Hard-coded Google Drive / Colab path; load_dotenv silently no-ops when the file
# is missing, so outside Colab the keys must come from the real environment.
dotenv.load_dotenv("/content/drive/MyDrive/study/Projects/keys.env")
def get_response(prompt, memory, hf_key, state, user_email, user_name):
    """Run one turn of the agent graph and trigger the PDF/email follow-up.

    Parameters
    ----------
    prompt : str
        The user's question for this turn.
    memory
        Conversation history; stored into the graph state unchanged.
    hf_key : str
        Hugging Face API token; exported to the environment only for the
        duration of this call and restored/removed afterwards.
    state : dict
        Mutable graph state, updated in place with question/memory/models.
    user_email, user_name : str
        Forwarded to ``save_send_email`` for the outgoing message.

    Returns
    -------
    dict
        The final state produced by ``compiled_graph.invoke``.
    """
    # Remember any pre-existing token so we can restore it instead of
    # leaving an empty string behind (the original always wrote "").
    previous_token = os.environ.get("HF_TOKEN")
    os.environ["HF_TOKEN"] = hf_key
    try:
        # Chat endpoint. Alternative repo id tried earlier, kept for reference:
        # "deepseek-ai/DeepSeek-V3.2-Exp"
        endpoint = HuggingFaceEndpoint(
            repo_id="openai/gpt-oss-20b",
            task='conversational',
            provider="auto",
            max_new_tokens=2048,
        )
        llm_gpt = ChatHuggingFace(llm=endpoint)

        # Hoisted: the same path expression was built three times before.
        rag_path = os.path.join(os.path.dirname(__file__), 'agent', 'rag', 'rag.xlsx')
        print("RAG_PATH ", rag_path, os.path.exists(rag_path))
        rag_model = rag_text_chooser(rag_path)

        # Update the shared graph state for this turn.
        state["question"] = prompt
        state["memory"] = memory
        state["llm"] = llm_gpt
        state["rag_model"] = rag_model

        call = compiled_graph.invoke(state)
        save_send_email(call, user_email, user_name)
        return call
    finally:
        # Always scrub the key — the original skipped cleanup when invoke()
        # raised, leaving the token in the environment for later calls.
        if previous_token is None:
            os.environ.pop("HF_TOKEN", None)
        else:
            os.environ["HF_TOKEN"] = previous_token
def save_send_email(call, user_email, user_name):
    """Generate the answer PDF (and, when re-enabled, email it) for a finished run.

    Acts only when the graph completed successfully (``all_ok``) and the task
    type produces a shareable answer (laptop choice, question, roadmap). The
    PDF is written to a temporary file that is always removed afterwards.

    Parameters
    ----------
    call : dict
        Final graph state returned by ``compiled_graph.invoke``.
    user_email : str
        Recipient address (actual sending is currently commented out).
    user_name : str
        Name interpolated into the generated email body.
    """
    import tempfile

    # Guard clauses replace the original three-level nesting.
    if not call.get('all_ok'):
        return
    if call['question_type'] not in (Available_Tasks.LAPTOP_CHOOSE.value,
                                     Available_Tasks.QUESTION.value,
                                     Available_Tasks.ROADMAP.value):
        return

    # NOTE(review): the Arabic prefix below was mojibake in the original file;
    # reconstructed as "the name of the colleague using it is:" — confirm
    # against the original source. Also fixed: "/n/n" was a typo for "\n\n".
    greeting = "اسم الزميل المستخدم هو : " + user_name + "\n\n"
    email_txt = get_llm_answer(
        model_llm=call['llm'],
        messages=[HumanMessage(content=greeting
                               + NODES_Prompts.Email_text.value
                               + call['question'] + str(call['memory'])
                               + call['question_type'] + call['answer'])])
    title = get_llm_answer(
        model_llm=call['llm'],
        messages=[HumanMessage(content=NODES_Prompts.Email_title.value
                               + call['question'] + str(call['memory'])
                               + call['question_type'] + call['answer'])])

    # Reserve a unique path; delete=False because PDF_generator_Node writes
    # to the path after this handle is closed.
    with tempfile.NamedTemporaryFile(suffix=".pdf", delete=False) as tmp_file:
        path_pdf = tmp_file.name
    print("PDF path:", path_pdf)

    try:
        PDF_generator_Node(call['answer'], title, path_pdf)
        # Sending is intentionally disabled for now; email_txt is still
        # computed above so re-enabling is a one-line change:
        # EMAIL_sender_Node(user_email, email_txt, title, path_pdf)
    finally:
        # Remove the temp file even when PDF generation raises — the
        # original leaked it on failure. (Redundant local `import os`
        # dropped; os is imported at module level.)
        if os.path.exists(path_pdf):
            os.remove(path_pdf)
    print("Done")
|