import logging
from telegram import Update
from telegram.ext import Application, CommandHandler, MessageHandler, filters, ContextTypes
# Configure root logging once at import time so every handler below can log.
logging.basicConfig(
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    level=logging.INFO
)
# Module-level logger named after this module (standard logging convention).
logger = logging.getLogger(__name__)

# Global agent instance. Set by start_telegram_bot() before polling begins;
# every message handler reads it through this module-global name.
agent = None
# Command Handlers
async def start(update: Update, context: ContextTypes.DEFAULT_TYPE):
    """Greet the user in response to the /start command."""
    first_name = update.effective_user.first_name
    greeting = (
        f'Hi {first_name}! I am your AI coding assistant. '
        f'Ask me anything and I will solve it step by step!'
    )
    await update.message.reply_text(greeting)
async def help_command(update: Update, context: ContextTypes.DEFAULT_TYPE):
    """Reply to the /help command with a summary of the bot's capabilities."""
    await update.message.reply_text(
        """
I can help you with:
- Coding questions
- Web searches
- Mathematical calculations
- Data analysis
- And much more!
Just send me your question and I'll work on it!
"""
    )
async def handle_message(update: Update, context: ContextTypes.DEFAULT_TYPE):
    """Run the shared agent on a free-text user message and reply with the result.

    Sends a placeholder "Thinking..." message immediately, then edits that
    message in place with either the agent's final answer or an error note.

    Args:
        update: Incoming Telegram update carrying the user's text message.
        context: Handler context (unused).
    """
    user_message = update.message.text
    user_id = update.effective_user.id
    # Lazy %-style args avoid formatting the message when INFO is disabled.
    logger.info("User %s asked: %s", user_id, user_message)

    # Instant feedback while the agent works.
    # NOTE(review): agent.run() looks synchronous, so it blocks the event
    # loop for the duration of the run — confirm whether this is acceptable.
    thinking_msg = await update.message.reply_text("🤔 Thinking...")
    try:
        # reset=False keeps the agent's conversation memory across messages.
        result = agent.run(user_message, reset=False)
        final_answer = str(result)
        await thinking_msg.edit_text(f"✅ Answer:\n\n{final_answer}")
    except Exception as e:
        # logger.exception records the full traceback, not just the message.
        logger.exception("Error processing message")
        await thinking_msg.edit_text(
            f"❌ Sorry, I encountered an error: {str(e)}\n\n"
            f"Please try rephrasing your question."
        )
async def handle_streaming_message(update: Update, context: ContextTypes.DEFAULT_TYPE):
    """Handle messages with streaming updates (advanced).

    Streams the agent's intermediate steps into a single Telegram message,
    editing it after each step, then replaces it with the final answer.

    Args:
        update: Incoming Telegram update carrying the user's text message.
        context: Handler context (unused).
    """
    user_message = update.message.text

    status_msg = await update.message.reply_text("🤔 Starting...")
    try:
        step_count = 0
        # Bug fix: initialize before the loop — the original raised NameError
        # after the loop whenever the agent stream yielded zero steps.
        step_log = None
        for step_log in agent.run(user_message, stream=True, reset=False):
            step_count += 1
            if hasattr(step_log, 'model_output'):
                # Truncate long model output to stay well under Telegram's
                # 4096-character message limit.
                preview = step_log.model_output[:500]
                await status_msg.edit_text(f"📝 Step {step_count}:\n{preview}...")
        if step_log is None:
            await status_msg.edit_text("❌ The agent produced no output.")
        else:
            # The last yielded step is treated as the final answer.
            final_answer = str(step_log)
            await status_msg.edit_text(f"✅ Final Answer:\n\n{final_answer}")
    except Exception as e:
        # Record the traceback (consistent with handle_message).
        logger.exception("Error in streaming handler")
        await status_msg.edit_text(f"❌ Error: {str(e)}")
def start_telegram_bot(shared_agent, token=None):
    """Start the Telegram bot with a shared agent instance in a thread-safe way.

    Args:
        shared_agent: The CodeAgent instance used to process messages.
        token: Telegram bot token. Falls back to the TELEGRAM_BOT_TOKEN
            environment variable when not provided.

    Raises:
        ValueError: If no token is supplied and TELEGRAM_BOT_TOKEN is unset.
    """
    import asyncio
    import os

    global agent
    agent = shared_agent

    # SECURITY FIX: the token used to be hard-coded as the default argument.
    # A bot token committed to source is a leaked credential (rotate it!);
    # read it from the environment instead.
    if token is None:
        token = os.environ.get("TELEGRAM_BOT_TOKEN")
    if not token:
        raise ValueError(
            "No Telegram bot token: pass token= or set TELEGRAM_BOT_TOKEN."
        )

    # run_polling needs an event loop; threads other than the main thread
    # have none by default, so create and install one for this thread.
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        application = Application.builder().token(token).build()

        # Register command and free-text handlers.
        application.add_handler(CommandHandler("start", start))
        application.add_handler(CommandHandler("help", help_command))
        application.add_handler(
            MessageHandler(filters.TEXT & ~filters.COMMAND, handle_message)
        )

        logger.info("Telegram Bot started!")
        # stop_signals=None: signal handlers can only be installed in the
        # main thread, so disable them to stay thread-safe.
        application.run_polling(
            allowed_updates=Update.ALL_TYPES,
            stop_signals=None,
        )
    finally:
        loop.close()
def main():
    """Standalone entry point (for backward compatibility)."""
    from smolagents import CodeAgent, LiteLLMModel
    import yaml
    from tools.final_answer import FinalAnswerTool

    # Model served by a local Ollama instance.
    model = LiteLLMModel(
        model_id="ollama/qwen2.5-coder:7b",
        api_base="http://localhost:11434",
        max_tokens=2096,
        temperature=0.5,
    )

    # Prompt templates live alongside the script.
    with open("prompts.yaml", 'r') as f:
        templates = yaml.safe_load(f)

    standalone_agent = CodeAgent(
        model=model,
        tools=[FinalAnswerTool()],
        max_steps=6,
        verbosity_level=1,
        prompt_templates=templates,
    )
    start_telegram_bot(standalone_agent)


if __name__ == '__main__':
    main()