Spaces:
Running
Running
| """ | |
| Claude Web API Proxy - HuggingFace Spaces Enhanced | |
| """ | |
| from fastapi import FastAPI, Request | |
| from fastapi.responses import StreamingResponse, JSONResponse, HTMLResponse | |
| from fastapi.middleware.cors import CORSMiddleware | |
| from contextlib import asynccontextmanager | |
| import httpx | |
| import json | |
| import asyncio | |
| from datetime import datetime | |
| from typing import Optional, Dict, List | |
| import os | |
| import uuid | |
| import logging | |
| from enum import Enum | |
| from pathlib import Path | |
| from huggingface_hub import HfApi, hf_hub_download, upload_file | |
| from fake_useragent import UserAgent | |
| import random | |
| import chromadb | |
| from sentence_transformers import SentenceTransformer | |
| import shutil | |
| logging.basicConfig(level=logging.INFO, format='%(asctime)s [%(levelname)s] %(message)s') | |
| logger = logging.getLogger(__name__) | |
| # ============ AccountManager ============ | |
class AccountStatus(str, Enum):
    """Lifecycle states for a proxied upstream account.

    Inherits from ``str`` so values serialize directly into JSON payloads
    (see Account.to_dict / AccountManager.from_dict round-trip).
    """
    ACTIVE = "Active"    # usable for request routing
    LIMITED = "Limited"  # rate-limited upstream; re-probed periodically
    ERROR = "Error"      # repeated failures (e.g. three 403s in a row)
    DISABLE = "Disable"  # manually disabled via the dashboard
class Account:
    """One upstream web-session account, with usage counters and health state."""

    def __init__(self, id: str, provider: str, credential_type: str, credential_value: str,
                 phone: str = "", email: str = "", note: str = ""):
        self.id = id
        self.provider = provider
        self.account_type = "网页版"  # fixed: this proxy only drives web-session accounts
        self.credential_type = credential_type
        self.credential_value = credential_value
        self.phone = phone
        self.email = email
        self.note = note
        self.status = AccountStatus.ACTIVE
        self.daily_calls = 0
        self.total_calls = 0
        self.last_call_time = None
        self.created_at = datetime.now()
        self.error_403_count = 0  # consecutive-403 strike counter

    def to_dict(self):
        """Serialize for the dashboard and for dataset backups.

        NOTE(review): the payload carries the raw credential alongside the
        masked form — confirm every consumer of this dict is trusted.
        """
        cred = self.credential_value
        masked = cred[-10:] if len(cred) > 10 else "***"
        last_call = self.last_call_time.isoformat() if self.last_call_time else None
        age_days = (datetime.now() - self.created_at).days
        return {
            "id": self.id,
            "provider": self.provider,
            "account_type": self.account_type,
            "credential_type": self.credential_type,
            "credential_value_masked": masked,
            "credential_value": cred,
            "phone": self.phone,
            "email": self.email,
            "note": self.note,
            "status": self.status,
            "daily_calls": self.daily_calls,
            "total_calls": self.total_calls,
            "error_count": self.error_403_count,
            "last_call_time": last_call,
            "created_at": self.created_at.isoformat(),
            "created_days": age_days,
        }
class AccountManager:
    """In-memory registry plus weighted-LRU scheduler for upstream accounts."""

    def __init__(self):
        self.accounts: Dict[str, Account] = {}
        self.current_index = 0  # retained for compatibility; selection uses weighted LRU below

    def add_account(self, account: Account):
        """Register (or replace) an account keyed by its id."""
        self.accounts[account.id] = account

    def remove_account(self, account_id: str):
        """Delete an account; unknown ids are ignored silently."""
        if account_id in self.accounts:
            del self.accounts[account_id]

    def get_next_active_account(self, provider: str = None) -> Optional[Account]:
        """Return the ACTIVE account with the fewest daily calls.

        Ties are broken by least-recently-used; accounts never called
        (last_call_time is None → datetime.min) win ties first.
        """
        active = [a for a in self.accounts.values() if a.status == AccountStatus.ACTIVE]
        if provider:
            active = [a for a in active if a.provider == provider]
        if not active:
            return None
        # Weighted LRU: prefer fewest daily calls, then longest idle.
        return min(active, key=lambda a: (a.daily_calls, a.last_call_time or datetime.min))

    def update_account_status(self, account_id: str, status: AccountStatus):
        """Set an account's status and clear its 403 strike counter."""
        if account_id in self.accounts:
            self.accounts[account_id].status = status
            self.accounts[account_id].error_403_count = 0

    def record_call(self, account_id: str, success: bool, error_msg: str = ""):
        """Record one proxied call; on failure, escalate the account's status.

        Three consecutive 403s mark the account ERROR; a rate-limit message
        marks it LIMITED; any other failure marks it ERROR immediately.
        """
        if account_id not in self.accounts:
            return
        acc = self.accounts[account_id]
        acc.total_calls += 1
        acc.daily_calls += 1
        acc.last_call_time = datetime.now()
        if not success:
            if "403" in error_msg:
                acc.error_403_count += 1
                if acc.error_403_count >= 3:
                    acc.status = AccountStatus.ERROR
            elif "limit" in error_msg.lower():
                acc.status = AccountStatus.LIMITED
            else:
                acc.error_403_count = 0
                acc.status = AccountStatus.ERROR

    def reset_daily_calls(self):
        """Zero every account's daily counter (invoked by the daily_reset task)."""
        for acc in self.accounts.values():
            acc.daily_calls = 0

    def to_dict(self):
        """Serialize every account for backup."""
        return {aid: acc.to_dict() for aid, acc in self.accounts.items()}

    def from_dict(self, data: dict):
        """Rebuild accounts from a backup produced by to_dict()."""
        for aid, acc_data in data.items():
            acc = Account(
                id=acc_data["id"],
                provider=acc_data["provider"],
                credential_type=acc_data["credential_type"],
                credential_value=acc_data["credential_value"],
                phone=acc_data.get("phone", ""),
                email=acc_data.get("email", ""),
                note=acc_data.get("note", "")
            )
            acc.status = AccountStatus(acc_data["status"])
            acc.daily_calls = acc_data.get("daily_calls", 0)
            acc.total_calls = acc_data.get("total_calls", 0)
            # Fix: the 403 strike counter (serialized as "error_count" in
            # Account.to_dict) was previously dropped on restore, resetting
            # failure history after every restart.
            acc.error_403_count = acc_data.get("error_count", 0)
            if acc_data.get("last_call_time"):
                acc.last_call_time = datetime.fromisoformat(acc_data["last_call_time"])
            acc.created_at = datetime.fromisoformat(acc_data["created_at"])
            self.accounts[aid] = acc
| # ============ DatasetBackup ============ | |
class DatasetBackup:
    """Backup/restore of accounts, conversations and vectors via an HF dataset repo.

    Degrades gracefully without HF_TOKEN: files are still staged locally under
    /tmp/backup, they are just never uploaded, and restores return empty data.
    """

    def __init__(self):
        self.dataset_name = os.getenv("HF_BACKUP_REPO", "multi-ai-proxy-backup")
        self.token = os.getenv("HF_TOKEN")
        self.api = HfApi(token=self.token) if self.token else None
        self.local_dir = Path("/tmp/backup")
        # Fix: parents=True so a missing parent directory cannot crash startup
        # (previously only exist_ok was set).
        self.local_dir.mkdir(parents=True, exist_ok=True)

    def backup_accounts(self, accounts_data: dict):
        """Write accounts.json locally, then upload it when a client is configured."""
        file_path = self.local_dir / "accounts.json"
        with open(file_path, 'w') as f:
            json.dump(accounts_data, f, indent=2)
        if self.api:
            try:
                upload_file(path_or_fileobj=str(file_path), path_in_repo="accounts.json",
                            repo_id=self.dataset_name, repo_type="dataset", token=self.token)
                logger.info("Accounts backed up")
            except Exception as e:
                logger.error(f"Backup accounts failed: {e}")

    def restore_accounts(self) -> dict:
        """Download and parse accounts.json; returns {} when unavailable."""
        try:
            if self.api:
                file_path = hf_hub_download(repo_id=self.dataset_name, filename="accounts.json",
                                            repo_type="dataset", token=self.token)
                with open(file_path, 'r') as f:
                    return json.load(f)
        except Exception as e:
            logger.warning(f"Restore accounts failed: {e}")
        return {}

    def backup_conversations(self, conv_data: dict):
        """Write conversations.json locally, then upload it when possible."""
        file_path = self.local_dir / "conversations.json"
        with open(file_path, 'w') as f:
            json.dump(conv_data, f, indent=2)
        if self.api:
            try:
                upload_file(path_or_fileobj=str(file_path), path_in_repo="conversations.json",
                            repo_id=self.dataset_name, repo_type="dataset", token=self.token)
                logger.info("Conversations backed up")
            except Exception as e:
                logger.error(f"Backup conversations failed: {e}")

    def restore_conversations(self) -> dict:
        """Download and parse conversations.json; returns {} when unavailable."""
        try:
            if self.api:
                file_path = hf_hub_download(repo_id=self.dataset_name, filename="conversations.json",
                                            repo_type="dataset", token=self.token)
                with open(file_path, 'r') as f:
                    return json.load(f)
        except Exception as e:
            logger.warning(f"Restore conversations failed: {e}")
        return {}

    def backup_vectors(self, chroma_dir: Path):
        """Zip the Chroma directory and upload it; no-op when the dir is absent."""
        if not chroma_dir.exists():
            return
        zip_path = self.local_dir / "chroma_db"  # make_archive appends ".zip" itself
        try:
            shutil.make_archive(str(zip_path), 'zip', chroma_dir)
            if self.api:
                upload_file(path_or_fileobj=f"{zip_path}.zip", path_in_repo="chroma_db.zip",
                            repo_id=self.dataset_name, repo_type="dataset", token=self.token)
                logger.info("Vectors backed up")
        except Exception as e:
            logger.error(f"Backup vectors failed: {e}")

    def restore_vectors(self, chroma_dir: Path):
        """Download chroma_db.zip and unpack it over chroma_dir."""
        try:
            if self.api:
                zip_path = hf_hub_download(repo_id=self.dataset_name, filename="chroma_db.zip",
                                           repo_type="dataset", token=self.token)
                shutil.unpack_archive(zip_path, chroma_dir)
                logger.info("Vectors restored")
        except Exception as e:
            logger.warning(f"Restore vectors failed: {e}")
| # ============ ConversationCache ============ | |
class ConversationCache:
    """Two-tier conversation memory: recent turns in RAM plus a Chroma vector store."""
    def __init__(self):
        # Rolling per-conversation window of recent messages.
        self.short_term: Dict[str, List] = {}
        # Per-conversation summaries written by _extract_long_term.
        self.long_term: Dict[str, Dict] = {}
        self.chroma_dir = Path("/tmp/chroma_db")
        self.chroma_dir.mkdir(exist_ok=True)
        self.chroma_client = chromadb.PersistentClient(path=str(self.chroma_dir))
        self.collection = self.chroma_client.get_or_create_collection("conversations")
        # Multilingual sentence embedder; model download can slow first startup.
        self.embedder = SentenceTransformer('paraphrase-multilingual-MiniLM-L12-v2')
        logger.info("ConversationCache initialized with vector search")
    def add_message(self, conv_id: str, role: str, content: str):
        """Append a turn to short-term memory; embed long user messages into Chroma."""
        if conv_id not in self.short_term:
            self.short_term[conv_id] = []
        self.short_term[conv_id].append({"role": role, "content": content})
        # Cap the rolling window at the 10 most recent turns.
        if len(self.short_term[conv_id]) > 10:
            self.short_term[conv_id] = self.short_term[conv_id][-10:]
        # Only user messages longer than 10 characters get indexed.
        if role == "user" and len(content) > 10:
            try:
                embedding = self.embedder.encode(content).tolist()
                self.collection.add(
                    embeddings=[embedding],
                    documents=[content],
                    metadatas=[{"conv_id": conv_id, "role": role, "timestamp": datetime.now().isoformat()}],
                    ids=[f"{conv_id}_{uuid.uuid4().hex[:8]}"]
                )
            except Exception as e:
                logger.warning(f"Vector add failed: {e}")
    def get_context(self, conv_id: str, query: str) -> List[Dict]:
        """Build context: last 5 short-term turns, up to 3 semantic hits, then summary.

        NOTE(review): the vector query is not filtered by conv_id, so hits from
        OTHER conversations can be pulled in — confirm this cross-conversation
        recall is intentional.
        """
        context = []
        short = self.short_term.get(conv_id, [])[-5:]
        context.extend(short)
        if len(query) > 10:
            try:
                query_embedding = self.embedder.encode(query).tolist()
                results = self.collection.query(query_embeddings=[query_embedding], n_results=3)
                if results['documents'] and results['documents'][0]:
                    for doc in results['documents'][0]:
                        # Semantic hits are replayed as user turns.
                        context.append({"role": "user", "content": doc})
            except Exception as e:
                logger.warning(f"Vector search failed: {e}")
        if conv_id in self.long_term:
            # Long-term summary, if any, is prepended as a system message.
            context.insert(0, {"role": "system", "content": self.long_term[conv_id].get("summary", "")})
        return context
    def _extract_long_term(self, conv_id: str):
        """Summarize a conversation (>=5 turns) into long-term memory.

        NOTE(review): not called anywhere in this file — possibly dead code,
        and the 'summary' is currently only a message count.
        """
        if conv_id not in self.short_term or len(self.short_term[conv_id]) < 5:
            return
        messages = self.short_term[conv_id]
        summary = f"Previous context: {len(messages)} messages"
        self.long_term[conv_id] = {"summary": summary, "updated": datetime.now().isoformat()}
    def to_dict(self):
        """Serialize both memory tiers (vectors are backed up separately as a zip)."""
        return {
            "short_term": self.short_term,
            "long_term": self.long_term
        }
    def from_dict(self, data: dict):
        """Restore both memory tiers from a backup dict."""
        self.short_term = data.get("short_term", {})
        self.long_term = data.get("long_term", {})
| # ============ FingerprintSimulator ============ | |
class FingerprintSimulator:
    """Produces randomized browser-like headers for requests to claude.ai."""

    def __init__(self):
        self.ua = UserAgent()

    def get_headers(self):
        """Return a fresh header dict with a random UA and Accept-Language."""
        languages = ['en-US,en;q=0.9', 'zh-CN,zh;q=0.9']
        headers = {
            'User-Agent': self.ua.random,
            'Accept': 'application/json',
            'Accept-Language': random.choice(languages),
            'Referer': 'https://claude.ai/chats',
            'Origin': 'https://claude.ai',
            'Sec-Fetch-Dest': 'empty',
            'Sec-Fetch-Mode': 'cors',
            'Sec-Fetch-Site': 'same-origin',
        }
        return headers
# ============ Global Instances ============
account_manager = AccountManager()   # in-memory account registry (restored in lifespan)
dataset_backup = DatasetBackup()     # HF dataset backup/restore helper
conv_cache = ConversationCache()     # loads the embedding model here — slows import
fingerprint = FingerprintSimulator() # randomized per-request headers
last_request_time = datetime.now()   # updated on every proxied chat request
async def check_limited_accounts():
    """Background task: every 2h, probe LIMITED accounts and re-activate working ones.

    A successful (HTTP 200) call to /api/organizations with the account's
    session key flips the account back to ACTIVE.
    """
    while True:
        await asyncio.sleep(7200)
        for acc in account_manager.accounts.values():
            if acc.status != AccountStatus.LIMITED:
                continue
            try:
                headers = fingerprint.get_headers()
                headers['Cookie'] = f'sessionKey={acc.credential_value}'
                async with httpx.AsyncClient() as client:
                    r = await client.get('https://claude.ai/api/organizations', headers=headers, timeout=10)
                if r.status_code == 200:
                    acc.status = AccountStatus.ACTIVE
            except Exception as e:
                # Fix: the previous bare `except:` also swallowed
                # asyncio.CancelledError, preventing clean task shutdown.
                logger.debug(f"Limited-account probe failed for {acc.id}: {e}")
async def daily_reset():
    """Background task: reset per-account daily call counters every 24 hours."""
    while True:
        # NOTE(review): a fixed 86400s sleep drifts and is not aligned to
        # midnight in any timezone — confirm that is acceptable.
        await asyncio.sleep(86400)
        account_manager.reset_daily_calls()
async def auto_backup():
    """Background task: hourly backup to the HF dataset, gated by BACKUP_MODE.

    BACKUP_MODE is re-read every cycle so it can be changed without a restart:
    "none" skips, "all" does everything, or one of "accounts"/"cache"/"vector".
    """
    while True:
        await asyncio.sleep(3600)
        backup_mode = os.getenv("BACKUP_MODE", "all")
        if backup_mode == "none":
            continue
        # Fix: the DatasetBackup methods are synchronous (file zip + HTTP
        # upload) and previously ran directly on the event loop, stalling all
        # in-flight chat streams once per hour. Offload to a worker thread.
        if backup_mode in ["all", "accounts"]:
            await asyncio.to_thread(dataset_backup.backup_accounts, account_manager.to_dict())
        if backup_mode in ["all", "cache"]:
            await asyncio.to_thread(dataset_backup.backup_conversations, conv_cache.to_dict())
        if backup_mode in ["all", "vector"]:
            await asyncio.to_thread(dataset_backup.backup_vectors, conv_cache.chroma_dir)
@asynccontextmanager
async def lifespan(app: FastAPI):
    """App lifespan: restore state on startup, spawn background tasks, back up on shutdown.

    Fixes: the @asynccontextmanager decorator was missing (it is imported at
    the top of the file but was never applied; bare async-generator lifespans
    are deprecated in Starlette), and the background task handles were not
    retained, leaving them eligible for garbage collection mid-flight.
    """
    restore_mode = os.getenv("RESTORE_MODE", "all")
    if restore_mode in ["all", "vector"]:
        dataset_backup.restore_vectors(conv_cache.chroma_dir)
    if restore_mode in ["all", "accounts"]:
        data = dataset_backup.restore_accounts()
        if data:
            account_manager.from_dict(data)
            logger.info(f"Restored {len(data)} accounts")
    if restore_mode in ["all", "cache"]:
        data = dataset_backup.restore_conversations()
        if data:
            conv_cache.from_dict(data)
            logger.info("Restored conversations")
    # Keep references so the tasks are not garbage-collected.
    tasks = [
        asyncio.create_task(check_limited_accounts()),
        asyncio.create_task(daily_reset()),
        asyncio.create_task(auto_backup()),
    ]
    logger.info("Application started")
    try:
        yield
    finally:
        # Stop the periodic loops, then take a final backup on shutdown.
        for task in tasks:
            task.cancel()
        dataset_backup.backup_accounts(account_manager.to_dict())
        dataset_backup.backup_conversations(conv_cache.to_dict())
        dataset_backup.backup_vectors(conv_cache.chroma_dir)
| app = FastAPI(title="Claude Proxy Enhanced", lifespan=lifespan) | |
| app.add_middleware(CORSMiddleware, allow_origins=["*"], allow_methods=["*"], allow_headers=["*"]) | |
async def root():
    """Service banner (route registration is not visible in this file)."""
    banner = {"name": "Claude Proxy Enhanced", "version": "3.0"}
    return banner
async def health():
    """Health probe: reports how many accounts are currently registered."""
    return {"status": "ok", "accounts": len(account_manager.accounts)}
async def api_info(request: Request):
    """Describe the three provider-facing endpoints relative to the caller's base URL."""
    base_url = str(request.base_url).rstrip('/')
    providers = {
        "claude": ("anthropic-messages", f"{base_url}/v1/messages"),
        "chatgpt": ("openai-completions", f"{base_url}/v1/chat/completions"),
        "gemini": ("openai-completions", f"{base_url}/v1beta/chat/completions"),
    }
    return {
        name: {"baseUrl": base_url, "api": api, "endpoint": endpoint}
        for name, (api, endpoint) in providers.items()
    }
async def dashboard():
    """Serve the dashboard single-page UI from dashboard.html."""
    html = Path("dashboard.html").read_text(encoding='utf-8')
    return HTMLResponse(html)
def check_dashboard_auth(request: Request):
    """Allow access when DASHBOARD_PASSWORD is unset, or when the header matches it."""
    expected = os.getenv("DASHBOARD_PASSWORD")
    if not expected:
        return True
    supplied = request.headers.get("X-Dashboard-Password")
    return supplied == expected
async def get_accounts(request: Request):
    """Dashboard: list every account in full serialized form."""
    if check_dashboard_auth(request):
        return account_manager.to_dict()
    return JSONResponse({"error": "Unauthorized"}, 401)
async def add_account(request: Request):
    """Dashboard: create an account from the JSON body and persist a backup."""
    if not check_dashboard_auth(request):
        return JSONResponse({"error": "Unauthorized"}, 401)
    payload = await request.json()
    account = Account(
        id=str(uuid.uuid4()),
        provider=payload["provider"],
        credential_type=payload["credential_type"],
        credential_value=payload["credential_value"],
        phone=payload.get("phone", ""),
        email=payload.get("email", ""),
        note=payload.get("note", ""),
    )
    account_manager.add_account(account)
    dataset_backup.backup_accounts(account_manager.to_dict())
    return {"status": "ok"}
async def update_account(account_id: str, request: Request):
    """Dashboard: partially update an account's editable fields.

    Fields absent from the JSON body are left unchanged.
    """
    if not check_dashboard_auth(request):
        return JSONResponse({"error": "Unauthorized"}, 401)
    data = await request.json()
    acc = account_manager.accounts.get(account_id)
    if acc is None:
        # Fix: an unknown account_id previously reported {"status": "ok"}
        # (and still triggered a backup) instead of signalling the error.
        return JSONResponse({"error": "Account not found"}, 404)
    acc.provider = data.get("provider", acc.provider)
    acc.credential_type = data.get("credential_type", acc.credential_type)
    acc.credential_value = data.get("credential_value", acc.credential_value)
    acc.phone = data.get("phone", acc.phone)
    acc.email = data.get("email", acc.email)
    acc.note = data.get("note", acc.note)
    dataset_backup.backup_accounts(account_manager.to_dict())
    return {"status": "ok"}
async def delete_account(account_id: str, request: Request):
    """Dashboard: remove an account and persist the updated roster."""
    if check_dashboard_auth(request):
        account_manager.remove_account(account_id)
        dataset_backup.backup_accounts(account_manager.to_dict())
        return {"status": "ok"}
    return JSONResponse({"error": "Unauthorized"}, 401)
async def update_account_status(account_id: str, request: Request):
    """Dashboard: set an account's status (Active/Limited/Error/Disable)."""
    if not check_dashboard_auth(request):
        return JSONResponse({"error": "Unauthorized"}, 401)
    data = await request.json()
    try:
        status = AccountStatus(data["status"])
    except (KeyError, ValueError):
        # Fix: a missing or invalid status previously escaped as an
        # unhandled KeyError/ValueError (HTTP 500) instead of a client error.
        return JSONResponse({"error": "Invalid status"}, 400)
    account_manager.update_account_status(account_id, status)
    return {"status": "ok"}
async def manual_backup(request: Request):
    """Dashboard: back up accounts, conversations and the vector store in one shot."""
    if not check_dashboard_auth(request):
        return JSONResponse({"error": "Unauthorized"}, 401)
    dataset_backup.backup_accounts(account_manager.to_dict())
    dataset_backup.backup_conversations(conv_cache.to_dict())
    dataset_backup.backup_vectors(conv_cache.chroma_dir)
    return {"message": "备份成功(账号+聊天+向量库)"}
async def manual_restore(request: Request):
    """Dashboard: restore accounts, conversations and the vector store from backup."""
    if not check_dashboard_auth(request):
        return JSONResponse({"error": "Unauthorized"}, 401)
    dataset_backup.restore_vectors(conv_cache.chroma_dir)
    acc_data = dataset_backup.restore_accounts()
    if acc_data:
        account_manager.from_dict(acc_data)
    conv_data = dataset_backup.restore_conversations()
    if conv_data:
        conv_cache.from_dict(conv_data)
    return {"message": f"恢复成功:{len(acc_data)} 个账号 + 聊天记录 + 向量库"}
async def backup_accounts_only(request: Request):
    """Dashboard: back up only the account roster."""
    if check_dashboard_auth(request):
        dataset_backup.backup_accounts(account_manager.to_dict())
        return {"message": "账号备份成功"}
    return JSONResponse({"error": "Unauthorized"}, 401)

async def backup_cache_only(request: Request):
    """Dashboard: back up only the conversation cache."""
    if check_dashboard_auth(request):
        dataset_backup.backup_conversations(conv_cache.to_dict())
        return {"message": "聊天记录备份成功"}
    return JSONResponse({"error": "Unauthorized"}, 401)

async def backup_vector_only(request: Request):
    """Dashboard: back up only the Chroma vector store."""
    if check_dashboard_auth(request):
        dataset_backup.backup_vectors(conv_cache.chroma_dir)
        return {"message": "向量库备份成功"}
    return JSONResponse({"error": "Unauthorized"}, 401)

async def restore_accounts_only(request: Request):
    """Dashboard: restore only the account roster from backup."""
    if not check_dashboard_auth(request):
        return JSONResponse({"error": "Unauthorized"}, 401)
    data = dataset_backup.restore_accounts()
    if not data:
        return {"message": "无账号备份数据"}
    account_manager.from_dict(data)
    return {"message": f"账号恢复成功:{len(data)} 个"}

async def restore_cache_only(request: Request):
    """Dashboard: restore only the conversation cache from backup."""
    if not check_dashboard_auth(request):
        return JSONResponse({"error": "Unauthorized"}, 401)
    data = dataset_backup.restore_conversations()
    if not data:
        return {"message": "无聊天记录备份"}
    conv_cache.from_dict(data)
    return {"message": "聊天记录恢复成功"}

async def restore_vector_only(request: Request):
    """Dashboard: restore only the Chroma vector store from backup."""
    if check_dashboard_auth(request):
        dataset_backup.restore_vectors(conv_cache.chroma_dir)
        return {"message": "向量库恢复成功"}
    return JSONResponse({"error": "Unauthorized"}, 401)
async def get_org_id(key: str) -> Optional[str]:
    """Resolve the claude.ai organization uuid for a session key.

    Returns None when the account has no organizations; raises on any
    non-200 response so the caller can record the failure.
    """
    headers = fingerprint.get_headers()
    headers['Cookie'] = f'sessionKey={key}'
    async with httpx.AsyncClient() as client:
        resp = await client.get('https://claude.ai/api/organizations', headers=headers, timeout=30)
    if resp.status_code != 200:
        raise Exception(f"Status {resp.status_code}")
    orgs = resp.json()
    return orgs[0]['uuid'] if orgs else None
async def create_conv(key: str, org_id: str) -> Optional[str]:
    """Create a fresh claude.ai conversation; return its uuid, or None on failure."""
    conv_id = str(uuid.uuid4())
    headers = fingerprint.get_headers()
    headers['Cookie'] = f'sessionKey={key}'
    headers['Content-Type'] = 'application/json'
    body = {
        'uuid': conv_id,
        'name': '',
        'include_conversation_preferences': True,
        'is_temporary': False,
        'enabled_imagine': False,
    }
    url = f'https://claude.ai/api/organizations/{org_id}/chat_conversations'
    async with httpx.AsyncClient() as client:
        resp = await client.post(url, headers=headers, json=body, timeout=30)
    if resp.status_code in (200, 201):
        return conv_id
    return None
async def chatgpt_completions(request: Request):
    """OpenAI-compatible endpoint backed by the 'chatgpt' account pool."""
    return await handle_chat_request(request, "chatgpt", "openai")
async def claude_messages(request: Request):
    """Anthropic-messages-compatible endpoint backed by the 'claude' account pool."""
    return await handle_chat_request(request, "claude", "anthropic")
async def gemini_completions(request: Request):
    """OpenAI-compatible endpoint backed by the 'gemini' account pool."""
    return await handle_chat_request(request, "gemini", "openai")
async def handle_chat_request(request: Request, provider: str, api_format: str):
    """Core proxy handler: pick an account, open a claude.ai conversation,
    forward the flattened prompt and relay the SSE reply.

    Args:
        request: incoming request with an OpenAI/Anthropic-style JSON body.
        provider: account pool to draw from ("claude", "chatgpt", "gemini").
        api_format: response wire format — "anthropic" (messages events)
            or "openai" (chat.completion / chat.completion.chunk).
    """
    global last_request_time
    last_request_time = datetime.now()
    try:
        body = await request.json()
        account = account_manager.get_next_active_account(provider)
        if not account:
            return JSONResponse({"error": f"No active {provider} accounts"}, 503)
        key = account.credential_value
        messages = body.get('messages', [])
        model = body.get('model', 'claude-sonnet-4-6')
        stream = body.get('stream', True)
        # Resolve the organization uuid; failures mark the account via record_call.
        try:
            org_id = await get_org_id(key)
            if not org_id:
                account_manager.record_call(account.id, False, "Invalid key")
                return JSONResponse({"error": "Auth failed"}, 401)
        except Exception as e:
            error_msg = str(e)
            logger.error(f"Account {account.id} get_org_id failed: {error_msg}")
            account_manager.record_call(account.id, False, error_msg)
            return JSONResponse({"error": error_msg}, 500)
        # Every request gets a brand-new upstream conversation.
        try:
            conv_id = await create_conv(key, org_id)
            if not conv_id:
                account_manager.record_call(account.id, False, "Conv failed")
                return JSONResponse({"error": "Conv failed"}, 500)
        except Exception as e:
            error_msg = str(e)
            logger.error(f"Account {account.id} create_conv failed: {error_msg}")
            account_manager.record_call(account.id, False, error_msg)
            return JSONResponse({"error": error_msg}, 500)
        # Flatten the chat history into one prompt string.
        # NOTE(review): assumes every m['content'] is a string; Anthropic-style
        # list-of-blocks content would raise here — confirm with callers.
        prompt = '\n\n'.join([f"{m['role']}: {m['content']}" for m in messages])
        # conv_id is new, so short-term memory is empty here; any context comes
        # from the global vector store (and long-term summaries, if present).
        context = conv_cache.get_context(conv_id, prompt)
        if context:
            prompt = '\n\n'.join([f"{c['role']}: {c['content']}" for c in context]) + '\n\n' + prompt
        # NOTE(review): messages[-1] raises IndexError when 'messages' is empty.
        conv_cache.add_message(conv_id, "user", messages[-1]['content'])
        headers = fingerprint.get_headers()
        headers['Cookie'] = f'sessionKey={key}'
        headers['Content-Type'] = 'application/json'
        payload = {
            'prompt': prompt,
            'timezone': 'Asia/Shanghai',
            'locale': 'en-US',
            'model': model,
            'rendering_mode': 'messages',
            'attachments': [],
            'files': [],
            'tools': [
                {"type": "web_search_v0", "name": "web_search"},
                {"type": "artifacts_v0", "name": "artifacts"},
                {"type": "repl_v0", "name": "repl"}
            ]
        }
        if not stream:
            # Non-streaming path: drain the upstream SSE into one string.
            full_text = ''
            async with httpx.AsyncClient() as client:
                async with client.stream('POST',
                    f'https://claude.ai/api/organizations/{org_id}/chat_conversations/{conv_id}/completion',
                    headers=headers, json=payload, timeout=120) as resp:
                    async for line in resp.aiter_lines():
                        if line.startswith('data: '):
                            try:
                                data = json.loads(line[6:])
                                if data.get('type') == 'content_block_delta':
                                    full_text += data.get('delta', {}).get('text', '')
                            except:  # noqa: E722 — tolerates malformed SSE lines (also swallows cancellation)
                                pass
            conv_cache.add_message(conv_id, "assistant", full_text)
            account_manager.record_call(account.id, True)
            if api_format == "anthropic":
                # Anthropic messages-API response shape; token usage is not tracked.
                return JSONResponse({
                    "id": f"msg_{uuid.uuid4().hex[:24]}",
                    "type": "message",
                    "role": "assistant",
                    "content": [{"type": "text", "text": full_text}],
                    "model": model,
                    "stop_reason": "end_turn",
                    "stop_sequence": None,
                    "usage": {"input_tokens": 0, "output_tokens": 0}
                })
            else:
                # OpenAI chat.completion response shape.
                return JSONResponse({
                    "id": f"chatcmpl-{int(datetime.now().timestamp())}",
                    "object": "chat.completion",
                    "model": model,
                    "choices": [{"index": 0, "message": {"role": "assistant", "content": full_text}, "finish_reason": "stop"}]
                })
        async def generate():
            # Streaming path: translate upstream SSE into the requested format.
            full_text = ''
            msg_id = f"msg_{uuid.uuid4().hex[:24]}"
            try:
                if api_format == "anthropic":
                    # Anthropic stream preamble: message_start + content_block_start.
                    yield f"event: message_start\ndata: {json.dumps({'type': 'message_start', 'message': {'id': msg_id, 'type': 'message', 'role': 'assistant', 'content': [], 'model': model, 'stop_reason': None, 'usage': {'input_tokens': 0, 'output_tokens': 0}}})}\n\n"
                    yield f"event: content_block_start\ndata: {json.dumps({'type': 'content_block_start', 'index': 0, 'content_block': {'type': 'text', 'text': ''}})}\n\n"
                async with httpx.AsyncClient() as client:
                    async with client.stream('POST',
                        f'https://claude.ai/api/organizations/{org_id}/chat_conversations/{conv_id}/completion',
                        headers=headers, json=payload, timeout=120) as resp:
                        async for line in resp.aiter_lines():
                            if line.startswith('data: '):
                                try:
                                    data = json.loads(line[6:])
                                    if data.get('type') == 'content_block_delta':
                                        text = data.get('delta', {}).get('text', '')
                                        full_text += text
                                        if api_format == "anthropic":
                                            yield f"event: content_block_delta\ndata: {json.dumps({'type': 'content_block_delta', 'index': 0, 'delta': {'type': 'text_delta', 'text': text}})}\n\n"
                                        else:
                                            chunk = {
                                                "id": f"chatcmpl-{int(datetime.now().timestamp())}",
                                                "object": "chat.completion.chunk",
                                                "model": model,
                                                "choices": [{"index": 0, "delta": {"content": text}, "finish_reason": None}]
                                            }
                                            yield f"data: {json.dumps(chunk)}\n\n"
                                except:  # noqa: E722 — tolerates malformed SSE lines (also swallows cancellation)
                                    pass
                if api_format == "anthropic":
                    # Anthropic stream epilogue: stop block, final delta, message_stop.
                    yield f"event: content_block_stop\ndata: {json.dumps({'type': 'content_block_stop', 'index': 0})}\n\n"
                    yield f"event: message_delta\ndata: {json.dumps({'type': 'message_delta', 'delta': {'stop_reason': 'end_turn', 'stop_sequence': None}, 'usage': {'output_tokens': 0}})}\n\n"
                    yield f"event: message_stop\ndata: {json.dumps({'type': 'message_stop'})}\n\n"
                else:
                    yield "data: [DONE]\n\n"
                conv_cache.add_message(conv_id, "assistant", full_text)
                account_manager.record_call(account.id, True)
            except Exception as e:
                # Errors mid-stream are reported in-band; HTTP status is already 200.
                account_manager.record_call(account.id, False, str(e))
                yield f"data: {json.dumps({'error': str(e)})}\n\n"
        return StreamingResponse(generate(), media_type="text/event-stream")
    except Exception as e:
        logger.error(f"Error: {e}")
        return JSONResponse({"error": str(e)}, 500)
| if __name__ == "__main__": | |
| import uvicorn | |
| uvicorn.run(app, host="0.0.0.0", port=7860) | |