| """Compatibility helpers for OpenAI chat completions across model families.""" |
|
|
| from __future__ import annotations |
|
|
| from typing import Any |
|
|
|
|
def rewrite_chat_completion_kwargs(payload: dict[str, Any]) -> dict[str, Any]:
    """Return a copy of *payload* with deprecated parameters translated.

    For GPT-5 family models the chat completions endpoint rejects the legacy
    ``max_tokens`` parameter; this moves its value to ``max_completion_tokens``
    unless the caller already supplied one. The input dict is never mutated.
    """
    updated = {**payload}
    model_name = str(updated.get("model") or "")
    needs_rename = (
        model_name.startswith("gpt-5")
        and "max_tokens" in updated
        and "max_completion_tokens" not in updated
    )
    if needs_rename:
        updated["max_completion_tokens"] = updated.pop("max_tokens")
    return updated
|
|
|
|
def patch_openai_chat_completions() -> bool:
    """Monkeypatch the OpenAI SDK so GPT-5 chat calls accept legacy ``max_tokens``.

    Wraps ``Completions.create`` so that every call first passes through
    :func:`rewrite_chat_completion_kwargs`. The patch is idempotent: a marker
    attribute on the wrapper prevents double-wrapping.

    Returns:
        ``True`` when the patch is installed (or was already installed),
        ``False`` when the OpenAI SDK is missing or its module layout changed.
    """
    try:
        from openai.resources.chat.completions.completions import Completions
    except Exception:
        # Best-effort: no SDK (or an incompatible layout) means nothing to patch.
        return False

    current = Completions.create
    if getattr(current, "_eval_framework_patched", False):
        # Already patched; never stack wrappers.
        return True

    original_create = current

    def _patched_create(self: Any, *args: Any, **kwargs: Any) -> Any:
        rewritten = rewrite_chat_completion_kwargs(kwargs)
        try:
            return original_create(self, *args, **rewritten)
        except Exception as exc:
            # Fallback for models that reject max_tokens but did not match the
            # gpt-5 prefix gate inside rewrite_chat_completion_kwargs: force
            # the rename unconditionally and retry once.
            # (Bug fix: the previous code re-ran the model-gated rewrite on
            # the same kwargs, producing an identical payload and retrying the
            # exact request that had just failed.)
            if (
                "Unsupported parameter: 'max_tokens'" in str(exc)
                and "max_tokens" in kwargs
            ):
                retried = dict(kwargs)
                limit = retried.pop("max_tokens")
                retried.setdefault("max_completion_tokens", limit)
                return original_create(self, *args, **retried)
            raise

    _patched_create._eval_framework_patched = True
    Completions.create = _patched_create
    return True
|
|