diff --git a/src/request/transformer.ts b/src/request/transformer.ts index 409456e..d867ddc 100644 --- a/src/request/transformer.ts +++ b/src/request/transformer.ts @@ -1,6 +1,16 @@ -import { ChatCompletionRequest, ResponsesRequest, InputItem, Message, Content } from "../types.js"; +import { + ChatCompletionRequest, + ResponsesRequest, + InputItem, + Message, + Content, +} from "../types.js"; import { getNormalizedModel } from "./model-map.js"; -import { getReasoningConfig, getTextVerbosity, getIncludeFields } from "./reasoning.js"; +import { + getReasoningConfig, + getTextVerbosity, + getIncludeFields, +} from "./reasoning.js"; import { getCodexInstructions } from "../prompts/index.js"; import { logDebug, logWarn } from "../logger.js"; @@ -8,12 +18,14 @@ export function messagesToInput(messages: Message[]): InputItem[] { return messages.map((msg) => { const content: Content[] = []; + const contentType = msg.role === "assistant" ? "output_text" : "input_text"; + if (typeof msg.content === "string") { - content.push({ type: "input_text", text: msg.content }); + content.push({ type: contentType, text: msg.content }); } else if (Array.isArray(msg.content)) { for (const item of msg.content) { if (item.type === "text") { - content.push({ type: "input_text", text: item.text || "" }); + content.push({ type: contentType, text: item.text || "" }); } else if (item.type === "image_url") { content.push({ type: "input_image", image_url: item.image_url }); } else { @@ -30,7 +42,9 @@ export function messagesToInput(messages: Message[]): InputItem[] { }); } -export function filterInput(input: InputItem[] | undefined): InputItem[] | undefined { +export function filterInput( + input: InputItem[] | undefined, +): InputItem[] | undefined { if (!Array.isArray(input)) { return input; } @@ -58,7 +72,10 @@ export async function transformChatCompletionRequest( ): Promise<ResponsesRequest> { const normalizedModel = getNormalizedModel(request.model) || request.model; - logDebug(null, `Transforming chat 
completion request: model=${request.model} -> ${normalizedModel}`); + logDebug( + null, + `Transforming chat completion request: model=${request.model} -> ${normalizedModel}`, + ); const codexInstructions = await getCodexInstructions(normalizedModel); const reasoningConfig = getReasoningConfig(normalizedModel); @@ -101,7 +118,10 @@ export async function transformResponsesRequest( ): Promise<ResponsesRequest> { const normalizedModel = getNormalizedModel(request.model) || request.model; - logDebug(null, `Transforming responses request: model=${request.model} -> ${normalizedModel}`); + logDebug( + null, + `Transforming responses request: model=${request.model} -> ${normalizedModel}`, + ); const codexInstructions = await getCodexInstructions(normalizedModel); const reasoningConfig = getReasoningConfig( @@ -116,7 +136,10 @@ export async function transformResponsesRequest( ...request, model: normalizedModel, input: filterInput( - request.input || + (request.messages + ? messagesToInput(request.messages as Message[]) + : undefined), + request.input || (request.messages ? messagesToInput(request.messages as Message[]) : undefined), ), stream: request.stream !== undefined ? request.stream : true, store: request.store !== undefined ? request.store : false,