# Source: billyaungmyint's Hugging Face repository (synced from GitHub via hub-sync)
# Commit: 2277115 (verified); file size: 686 bytes
"""Minimal smolagents demo: run a tool-less CodeAgent on a single arithmetic task."""
# LiteLLMModel is kept imported for the commented-out alternative backend below.
from smolagents import CodeAgent, InferenceClientModel, LiteLLMModel

# Initialize a model served via the Hugging Face Inference API.
# NOTE(review): confirm "deepseek-ai/DeepSeek-V4-Flash" is a valid model id on the Hub;
# it may need updating to a currently-hosted DeepSeek checkpoint.
model = InferenceClientModel("deepseek-ai/DeepSeek-V4-Flash")
# Alternative: route the same agent through LiteLLM (e.g. an OpenAI model) instead:
# model = LiteLLMModel("openai/gpt-4.1-mini")

# Create a code-writing agent with no extra tools beyond the model itself.
agent = CodeAgent(tools=[], model=model)

# Run the agent on a single task and print its final answer.
result = agent.run("Calculate the sum of numbers from 1 to 10")
print(result)

# Models are usually invoked by the agent, but you can also call one directly
# with a chat-style message list, like this:
# messages = [
#     {"role": "user", "content": [{"type": "text", "text": "Calculate the sum of numbers from 1 to 10"}]}
# ]
# print(model(messages))