from typing import TypedDict, Annotated
from tool import (add,
substract,
multiply,
divide,
DuckDuckGoSearchTool,
TavilySearchTool,
WikipediaSearchTool,
ArxivSearchTool,
PubmedSearchTool,
save_and_read_file,
download_file_from_url,
extract_text_from_image,
analyze_csv_file,
analyse_excel_file)
from os import getenv
from langgraph.graph.message import add_messages
from langchain_core.messages import AnyMessage, SystemMessage, HumanMessage, AIMessage
from langgraph.graph import StateGraph, START, END, MessagesState
from langgraph.prebuilt import ToolNode, tools_condition
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_groq import ChatGroq
# Hugging Face API token, read from the environment. It is never referenced
# again in this file — presumably consumed implicitly by the imported
# HuggingFace integrations via the environment. TODO confirm, or drop.
HUGGINGFACEHUB_API_TOKEN = getenv("HUGGINGFACEHUB_API_TOKEN")
# load the system prompt from the file
# NOTE: runs at import time; raises FileNotFoundError if prompt.txt is
# missing from the working directory.
with open("prompt.txt", "r", encoding="utf-8") as f:
    system_prompt = f.read()
# System message prepended to every conversation by the retriever node below.
sys_msg = SystemMessage(content=system_prompt)
# Loading the assistant
# temperature=0 for deterministic tool-calling behavior. Requires Google
# GenAI credentials to be configured in the environment at call time.
chat = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)
# Tool inventory exposed to the LLM: basic arithmetic, several search
# backends, and file/document helpers. All names are defined in the
# project-local `tool` module.
# NOTE(review): `substract` (sic) and the mixed `analyze_csv_file` /
# `analyse_excel_file` spellings mirror the names exported by `tool`;
# renaming them here would break the import — fix at the source if desired.
tools = [add,
         substract,
         multiply,
         divide,
         DuckDuckGoSearchTool,
         TavilySearchTool,
         WikipediaSearchTool,
         ArxivSearchTool,
         PubmedSearchTool,
         save_and_read_file,
         download_file_from_url,
         extract_text_from_image,
         analyze_csv_file,
         analyse_excel_file]
# Model handle with the tool schemas attached, so the LLM can emit tool calls.
chat_with_tools = chat.bind_tools(tools)
def simple_graph():
    """Build and compile the agent graph.

    Topology: START -> retriever -> assistant, then `tools_condition`
    routes the assistant either to the tool node (looping back to the
    assistant afterwards) or to END when no tool call was made.

    Returns:
        A compiled LangGraph runnable over ``MessagesState``.
    """

    def retriever(state: MessagesState):
        """Prepend the system prompt to the conversation.

        No actual retrieval is performed here — the name is kept for
        graph-shape compatibility; it simply injects ``sys_msg`` once
        at the start of the run.
        """
        return {"messages": [sys_msg] + state["messages"]}

    def assistant(state: MessagesState):
        """Invoke the tool-bound model on the current message history."""
        reply = chat_with_tools.invoke(state["messages"])
        return {"messages": [reply]}

    # Wire up nodes.
    workflow = StateGraph(MessagesState)
    workflow.add_node("retriever", retriever)   # system-prompt injector
    workflow.add_node("assistant", assistant)   # LLM step
    workflow.add_node("tools", ToolNode(tools)) # tool executor

    # Wire up edges.
    workflow.add_edge(START, "retriever")
    workflow.add_edge("retriever", "assistant")
    # Routes to "tools" when the last AI message contains tool calls,
    # otherwise to END.
    workflow.add_conditional_edges("assistant", tools_condition)
    workflow.add_edge("tools", "assistant")

    return workflow.compile()