Spaces:
Sleeping
Sleeping
| import { ChatMessage } from "@langchain/core/messages"; | |
| //#region src/utils/misc.ts | |
/** Immediately invoke `fn` and return its result (IIFE helper). */
const iife$1 = (fn) => {
  return fn();
};
/**
 * Whether the given model name refers to an OpenAI reasoning model:
 * the o-series ("o1", "o3-mini", …) or the gpt-5 family excluding the
 * chat-tuned "gpt-5-chat*" variants.
 *
 * @param {string | null | undefined} model - Model name to inspect.
 * @returns {boolean} True when the model is a reasoning model.
 */
function isReasoningModel(model) {
  if (!model) return false;
  // o-series: name starts with "o" followed by a digit ("o1", "o3-mini", ...).
  // (`model` is known non-nullish here, so no `?? ""` fallback is needed.)
  if (/^o\d/.test(model)) return true;
  // gpt-5 family reasons by default, except the chat-tuned variants.
  if (model.startsWith("gpt-5") && !model.startsWith("gpt-5-chat")) return true;
  return false;
}
/**
 * Return the custom role carried by a generic chat message, warning on
 * stderr when the role is not one of the roles OpenAI understands.
 *
 * @param {{ role: string }} message - Generic chat message with a `role`.
 * @returns {string} The message's role, unchanged.
 */
function extractGenericMessageCustomRole(message) {
  const knownRoles = ["system", "developer", "assistant", "user", "function", "tool"];
  if (!knownRoles.includes(message.role)) {
    console.warn(`Unknown message role: ${message.role}`);
  }
  return message.role;
}
/**
 * Extract a filename from a content block's metadata, checking the
 * `filename`, `name`, and `title` keys in that order of precedence.
 *
 * @param {{ metadata?: { filename?: string, name?: string, title?: string } }} block
 * @returns {string | undefined} The first metadata key present, else undefined.
 */
function getFilenameFromMetadata(block) {
  const meta = block.metadata;
  return meta?.filename ?? meta?.name ?? meta?.title;
}
// Placeholder used when a file block carries no usable filename metadata.
const LC_AUTOGENERATED_FILENAME = "LC_AUTOGENERATED";
/**
 * Like {@link getFilenameFromMetadata}, but guarantees a filename:
 * falls back to the "LC_AUTOGENERATED" placeholder (with a warning)
 * when no `filename`/`name`/`title` metadata key is present.
 *
 * @param {{ metadata?: { filename?: string, name?: string, title?: string } }} block
 * @returns {string} The metadata filename, or the placeholder.
 */
function getRequiredFilenameFromMetadata(block) {
  // Reuse the shared lookup instead of duplicating the `filename ?? name ?? title` chain.
  const filename = getFilenameFromMetadata(block);
  if (!filename) {
    console.warn("OpenAI may require a filename for file uploads. Specify a filename in the content block metadata, e.g.: { type: 'file', mimeType: '...', data: '...', metadata: { filename: 'my-file.pdf' } }. Using placeholder filename 'LC_AUTOGENERATED'.");
    return LC_AUTOGENERATED_FILENAME;
  }
  return filename;
}
/**
 * Map a LangChain message to the corresponding OpenAI role string.
 *
 * @param {import("@langchain/core/messages").BaseMessage} message
 * @returns {string} OpenAI role ("system", "assistant", "user", "function",
 *   "tool", or a custom role for generic messages).
 * @throws {Error} If the message type is unknown, or a "generic" message is
 *   not a ChatMessage instance.
 */
function messageToOpenAIRole(message) {
  const type = message._getType();
  // Generic messages carry their own custom role.
  if (type === "generic") {
    if (!ChatMessage.isInstance(message)) throw new Error("Invalid generic chat message");
    return extractGenericMessageCustomRole(message);
  }
  const roleByType = {
    system: "system",
    ai: "assistant",
    human: "user",
    function: "function",
    tool: "tool",
  };
  if (!Object.hasOwn(roleByType, type)) throw new Error(`Unknown message type: ${type}`);
  return roleByType[type];
}
/**
 * Whether the given model is one that should default to OpenAI's
 * Responses API (newer pro-tier gpt-5 variants and codex models).
 *
 * @param {string} model - Model name to inspect.
 * @returns {boolean} True when the Responses API is preferred.
 */
function _modelPrefersResponsesAPI(model) {
  const responsesOnlyMarkers = ["gpt-5.2-pro", "gpt-5.4-pro", "gpt-5.5-pro", "codex"];
  return responsesOnlyMarkers.some((marker) => model.includes(marker));
}
| //#endregion | |
| export { _modelPrefersResponsesAPI, getFilenameFromMetadata, getRequiredFilenameFromMetadata, iife$1 as iife, isReasoningModel, messageToOpenAIRole }; | |
| //# sourceMappingURL=misc.js.map |