"""
Configuration and constants for the GAIA agent.
Centralized configuration for easy management and customization.
"""
import os
from dotenv import load_dotenv
load_dotenv()
# ==================== API KEYS ====================
# Secrets are read from the environment (populated by load_dotenv() above).
# Each defaults to "" so importing this module never raises when a key is unset;
# callers are expected to validate before use.
OPENROUTER_API_KEY = os.getenv("OPENROUTER_API_KEY", "")
TAVILY_API_KEY = os.getenv("TAVILY_API_KEY", "")
NVIDIA_API_KEY = os.getenv("NVIDIA_API_KEY", "")

# ==================== LLM CONFIGURATION ====================
LLM_MODEL = "inclusionai/Ling-2.6-1T:free"  # OpenRouter model identifier
# Int toggle: presumably non-zero routes requests to the NVIDIA endpoint
# instead of OpenRouter — TODO confirm against the caller that reads it.
NVIDIA = 0
NVIDIA_MODEL = "qwen/qwen3-coder-480b-a35b-instruct"
LLM_TEMPERATURE = 0      # deterministic sampling
LLM_MAX_ITERATIONS = 5   # cap on the agent's reason/act loop

# ==================== TOOL CONFIGURATION ====================
WIKIPEDIA_MAX_PAGES = 2          # max Wikipedia pages fetched per query
WIKIPEDIA_CHAR_LIMIT = 8_000     # truncate Wikipedia content to this many chars
YOUTUBE_CHAR_LIMIT = 10_000      # truncate YouTube transcripts to this many chars
WEB_SEARCH_RESULTS_LIMIT = 3     # max web-search hits returned to the agent
EXCEL_PREVIEW_ROWS = 50          # rows shown when previewing a spreadsheet

# ==================== OUTPUT CONFIGURATION ====================
# Results path is overridable via the OUTPUT_FILE environment variable;
# the default preserves the previous hard-coded location.
OUTPUT_FILE = os.getenv("OUTPUT_FILE", "/home/nitin/AI/hfagent/results.jsonl")
FINAL_ANSWER_MAX_LENGTH = 100     # max chars kept from the final answer
REASONING_TRACE_MAX_LENGTH = 200  # max chars kept from the reasoning trace

# ==================== TOOL NAMES ====================
# Canonical registry of tool identifiers; keys are the symbolic names used in
# code, values are the names the LLM must emit to invoke each tool.
TOOL_NAMES = {
    "WEB_SEARCH": "web_search",
    "WIKI_SEARCH": "wikisearch",
    "YOUTUBE_TRANSCRIPT": "youtube_transcript",
    "EXCEL_ANALYSIS": "load_and_analyze_excel_file",
    "IMAGE_TEXT": "extract_text_from_image",
    "AUDIO_TRANSCRIBE": "transcribe_audio",
    "ADD": "addition_tool",
    "SUBTRACT": "subtraction_tool",
    "MULTIPLY": "multiplication_tool",
    "NONE": "none",  # sentinel: no tool required
}

# ==================== VALIDATION ====================
# Lowercase file extensions accepted by the corresponding file tools.
VALID_EXCEL_EXTENSIONS = (".xlsx", ".xls", ".csv")
VALID_IMAGE_EXTENSIONS = (".jpg", ".jpeg", ".png", ".bmp", ".tiff", ".gif")
VALID_AUDIO_EXTENSIONS = (".mp3", ".wav", ".m4a", ".flac", ".ogg")