import os
import requests
# Hugging Face Inference API endpoint for the Zephyr-7B (alpha) chat model.
API_URL = "https://api-inference.huggingface.co/models/HuggingFaceH4/zephyr-7b-alpha"
# Bearer token is read from the HF_TOKEN environment variable at import time.
# NOTE(review): if HF_TOKEN is unset this becomes "Bearer None" and every
# request will fail with 401 — confirm the deployment always sets it.
HEADERS = {"Authorization": f"Bearer {os.getenv('HF_TOKEN')}"}
# System prompt prepended to every question; instructs the model to answer
# tersely, with no explanation and no prefix/suffix around the answer.
system_prompt = (
    "You are an expert AI assistant taking a benchmark test for reasoning. "
    "Answer only the main question directly. Do not explain. Do not show work. "
    "If you see images or documents mentioned, assume you have access and extract answer. "
    "Use search or tools if needed. Answer format must be plain with no prefix or suffix."
)
class BasicAgent:
    """Minimal question-answering agent backed by the Hugging Face
    Inference API (Zephyr-7B-alpha).

    Calling the instance with a question string returns the model's answer
    as a plain string. On failure it returns an error string prefixed with
    "ERROR:" (bad/unexpected API payload) or "AGENT ERROR:" (exception),
    rather than raising, so a benchmark driver loop keeps running.
    """

    def __init__(self):
        print("✅ Agent initialized with Zephyr 7B.")

    def __call__(self, question: str) -> str:
        """Send *question* to the model and return the stripped answer.

        The system prompt is prepended, and the completion is cut after the
        final "Answer:" marker because the Inference API may echo the full
        prompt back inside ``generated_text``.
        """
        prompt = f"{system_prompt}\n\nQuestion: {question}\nAnswer:"
        try:
            response = requests.post(
                API_URL,
                headers=HEADERS,
                json={"inputs": prompt},
                timeout=40,
            )
            response.raise_for_status()
            result = response.json()
            # The API returns either a list of generations or a single dict.
            if isinstance(result, list) and result:
                text = result[0].get("generated_text", "")
            elif isinstance(result, dict) and "generated_text" in result:
                text = result["generated_text"]
            elif isinstance(result, dict) and "error" in result:
                # e.g. model still loading or invalid token — surface the
                # API's own message instead of a generic format error.
                return f"ERROR: {result['error']}"
            else:
                return "ERROR: Unexpected response format"
            # Keep only what follows the last "Answer:" marker; applied to
            # BOTH response shapes (the original dict branch skipped this
            # and could leak the echoed prompt).
            return text.split("Answer:")[-1].strip()
        except Exception as e:
            # Boundary catch-all by design: never raise into the caller.
            return f"AGENT ERROR: {e}"