File size: 2,217 Bytes
c2b7eb3
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
import { ChatMessage } from "@langchain/core/messages";
//#region src/utils/misc.ts
/** Immediately invoke the given zero-argument function and return its result. */
const iife$1 = function (fn) {
	return fn();
};
/**
 * Whether the given OpenAI model name denotes a reasoning model.
 * @param {string | undefined} model - Model name; falsy values return false.
 * @returns {boolean}
 */
function isReasoningModel(model) {
	if (!model) return false;
	// o-series models (o1, o3, ...) are reasoning models. The original had a
	// redundant `model ?? ""` here, unreachable after the falsy guard above.
	if (/^o\d/.test(model)) return true;
	// The gpt-5 family reasons by default, except the chat-tuned gpt-5-chat variants.
	if (model.startsWith("gpt-5") && !model.startsWith("gpt-5-chat")) return true;
	return false;
}
/**
 * Return the custom role carried by a generic chat message, warning (but not
 * throwing) when the role is not one of the standard OpenAI roles.
 * @param {{ role: string }} message
 * @returns {string} The message's role, passed through unchanged.
 */
function extractGenericMessageCustomRole(message) {
	const knownRoles = ["system", "developer", "assistant", "user", "function", "tool"];
	if (!knownRoles.includes(message.role)) console.warn(`Unknown message role: ${message.role}`);
	return message.role;
}
/**
 * Look up a filename from a content block's metadata, accepting any of the
 * common keys: `filename`, then `name`, then `title`.
 * @param {{ metadata?: Record<string, unknown> }} block
 * @returns {string | undefined} The first key present, or undefined if none.
 */
function getFilenameFromMetadata(block) {
	const meta = block.metadata ?? {};
	return meta.filename ?? meta.name ?? meta.title;
}
// Placeholder filename used when a file content block carries no usable name.
const LC_AUTOGENERATED_FILENAME = "LC_AUTOGENERATED";
/**
 * Resolve a filename from a content block's metadata, falling back to the
 * LC_AUTOGENERATED placeholder (with a console warning) when none is present.
 * @param {{ metadata?: Record<string, unknown> }} block
 * @returns {string} The resolved filename, never empty.
 */
function getRequiredFilenameFromMetadata(block) {
	// Reuse the shared lookup instead of duplicating its fallback chain here.
	const filename = getFilenameFromMetadata(block);
	if (!filename) {
		console.warn("OpenAI may require a filename for file uploads. Specify a filename in the content block metadata, e.g.: { type: 'file', mimeType: '...', data: '...', metadata: { filename: 'my-file.pdf' } }. Using placeholder filename 'LC_AUTOGENERATED'.");
		return LC_AUTOGENERATED_FILENAME;
	}
	return filename;
}
/**
 * Map a LangChain message to the corresponding OpenAI role string.
 * @param {{ _getType: () => string }} message
 * @returns {string} The OpenAI role ("system", "assistant", "user", ...).
 * @throws {Error} On an unknown message type, or an invalid generic message.
 */
function messageToOpenAIRole(message) {
	const type = message._getType();
	// Fixed mapping for the standard message types.
	const directRoles = new Map([
		["system", "system"],
		["ai", "assistant"],
		["human", "user"],
		["function", "function"],
		["tool", "tool"]
	]);
	const role = directRoles.get(type);
	if (role !== undefined) return role;
	// Generic messages carry a custom role; validate before extracting it.
	if (type === "generic") {
		if (!ChatMessage.isInstance(message)) throw new Error("Invalid generic chat message");
		return extractGenericMessageCustomRole(message);
	}
	throw new Error(`Unknown message type: ${type}`);
}
/**
 * Whether the given model name should default to the OpenAI Responses API
 * rather than Chat Completions.
 * @param {string} model
 * @returns {boolean}
 */
function _modelPrefersResponsesAPI(model) {
	// Substrings identifying models that prefer the Responses API.
	const markers = ["gpt-5.2-pro", "gpt-5.4-pro", "gpt-5.5-pro", "codex"];
	return markers.some((marker) => model.includes(marker));
}
//#endregion
export { _modelPrefersResponsesAPI, getFilenameFromMetadata, getRequiredFilenameFromMetadata, iife$1 as iife, isReasoningModel, messageToOpenAIRole };

//# sourceMappingURL=misc.js.map