Fix several bugs; the current version now achieves full compatibility.

2026-04-23 21:13:08 +08:00
parent 3e6fe3a6a1
commit d071c917b0
7 changed files with 357 additions and 38 deletions


@@ -48,8 +48,11 @@ export class ResponsesStreamEncoder {
private openedReasoning = false;
private messageOutputIndex: number | null = null;
private reasoningOutputIndex: number | null = null;
private messageText = '';
private reasoningText = '';
private completed = false;
private toolItems = new Map<number, { itemId: string; callId: string; name: string; opened: boolean; outputIndex: number }>();
private finalUsage: CanonicalState['usage'] = null;
private toolItems = new Map<number, { itemId: string; callId: string; name: string; argumentsText: string; opened: boolean; outputIndex: number }>();
constructor(private readonly metadata: CanonicalMetadata) {
this.messageItemId = `item_${metadata.id}_message`;
@@ -68,6 +71,7 @@ export class ResponsesStreamEncoder {
const output: Array<Record<string, unknown>> = [];
if (event.type === 'reasoning_delta') {
this.reasoningText += event.text;
if (!this.openedReasoning) {
this.reasoningOutputIndex = this.allocateOutputIndex();
output.push({
@@ -90,6 +94,7 @@ export class ResponsesStreamEncoder {
}
if (event.type === 'text_delta') {
this.messageText += event.text;
if (!this.openedMessage) {
this.messageOutputIndex = this.allocateOutputIndex();
output.push({
@@ -134,17 +139,11 @@ export class ResponsesStreamEncoder {
}
if (event.type === 'usage') {
this.finalUsage = event.usage;
output.push({
type: 'response.completed',
sequence_number: this.sequenceNumber++,
response: {
id: this.metadata.id,
object: 'response',
model: this.metadata.model,
status: 'completed',
output: [],
usage: event.usage,
},
response: this.completedResponse(),
});
this.completed = true;
}
@@ -157,13 +156,7 @@ export class ResponsesStreamEncoder {
return [{
type: 'response.completed',
sequence_number: this.sequenceNumber++,
response: {
id: this.metadata.id,
object: 'response',
model: this.metadata.model,
status: 'completed',
output: [],
},
response: this.completedResponse(),
}];
}
@@ -184,6 +177,7 @@ export class ResponsesStreamEncoder {
private toolArgumentsDelta(index: number, delta: string): Record<string, unknown> {
const item = this.getToolItem(index);
item.argumentsText += delta;
return {
type: 'response.function_call_arguments.delta',
sequence_number: this.sequenceNumber++,
@@ -198,24 +192,52 @@ export class ResponsesStreamEncoder {
private closeOpenItems(): Array<Record<string, unknown>> {
const output: Array<Record<string, unknown>> = [];
if (this.openedReasoning) {
output.push({ type: 'response.reasoning_summary_text.done', sequence_number: this.sequenceNumber++, output_index: this.reasoningOutputIndex, summary_index: 0, item_id: this.reasoningItemId });
output.push({ type: 'response.output_item.done', sequence_number: this.sequenceNumber++, output_index: this.reasoningOutputIndex, item: { type: 'reasoning', id: this.reasoningItemId, status: 'completed' } });
output.push({ type: 'response.reasoning_summary_text.done', sequence_number: this.sequenceNumber++, output_index: this.reasoningOutputIndex, summary_index: 0, item_id: this.reasoningItemId, text: this.reasoningText });
output.push({ type: 'response.output_item.done', sequence_number: this.sequenceNumber++, output_index: this.reasoningOutputIndex, item: makeReasoningItem(this.reasoningItemId, this.reasoningText) });
this.openedReasoning = false;
}
if (this.openedMessage) {
output.push({ type: 'response.output_text.done', sequence_number: this.sequenceNumber++, output_index: this.messageOutputIndex, content_index: 0, item_id: this.messageItemId });
output.push({ type: 'response.output_item.done', sequence_number: this.sequenceNumber++, output_index: this.messageOutputIndex, item: { type: 'message', id: this.messageItemId, role: 'assistant', status: 'completed' } });
output.push({ type: 'response.output_text.done', sequence_number: this.sequenceNumber++, output_index: this.messageOutputIndex, content_index: 0, item_id: this.messageItemId, text: this.messageText });
output.push({ type: 'response.output_item.done', sequence_number: this.sequenceNumber++, output_index: this.messageOutputIndex, item: makeMessageItem(this.messageItemId, this.messageText) });
this.openedMessage = false;
}
for (const item of [...this.toolItems.values()].sort((left, right) => left.outputIndex - right.outputIndex)) {
if (!item.opened) continue;
output.push({ type: 'response.function_call_arguments.done', sequence_number: this.sequenceNumber++, output_index: item.outputIndex, item_id: item.itemId, call_id: item.callId, name: item.name });
output.push({ type: 'response.output_item.done', sequence_number: this.sequenceNumber++, output_index: item.outputIndex, item: { type: 'function_call', id: item.itemId, status: 'completed' } });
output.push({ type: 'response.function_call_arguments.done', sequence_number: this.sequenceNumber++, output_index: item.outputIndex, item_id: item.itemId, call_id: item.callId, name: item.name, arguments: item.argumentsText });
output.push({ type: 'response.output_item.done', sequence_number: this.sequenceNumber++, output_index: item.outputIndex, item: makeFunctionCallItem(item.itemId, item.callId, item.name, item.argumentsText) });
item.opened = false;
}
return output;
}
private completedResponse(): Record<string, unknown> {
return {
id: this.metadata.id,
object: 'response',
model: this.metadata.model,
status: 'completed',
output: this.completedOutput(),
usage: this.finalUsage ?? undefined,
};
}
private completedOutput(): Array<Record<string, unknown>> {
const items: Array<{ outputIndex: number; item: Record<string, unknown> }> = [];
if (this.reasoningOutputIndex !== null) {
items.push({ outputIndex: this.reasoningOutputIndex, item: makeReasoningItem(this.reasoningItemId, this.reasoningText) });
}
if (this.messageOutputIndex !== null) {
items.push({ outputIndex: this.messageOutputIndex, item: makeMessageItem(this.messageItemId, this.messageText) });
}
for (const item of this.toolItems.values()) {
items.push({
outputIndex: item.outputIndex,
item: makeFunctionCallItem(item.itemId, item.callId, item.name, item.argumentsText),
});
}
return items.sort((left, right) => left.outputIndex - right.outputIndex).map(({ item }) => item);
}
private allocateOutputIndex(): number {
return this.nextOutputIndex++;
}
@@ -228,6 +250,7 @@ export class ResponsesStreamEncoder {
itemId,
callId: id ?? `call_${itemId}`,
name: name ?? '',
argumentsText: '',
opened: false,
outputIndex: this.allocateOutputIndex(),
};
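The new code calls makeReasoningItem, makeMessageItem and makeFunctionCallItem, which are not shown in this hunk. A minimal sketch of what such helpers could return, matching the fields the tests below assert on (the exact shapes are an assumption, not taken from this commit):

function makeMessageItem(id: string, text: string): Record<string, unknown> {
  // Assumed shape: a completed assistant message carrying the accumulated text as output_text.
  return { type: 'message', id, role: 'assistant', status: 'completed', content: [{ type: 'output_text', text }] };
}
function makeReasoningItem(id: string, text: string): Record<string, unknown> {
  // Assumed shape: a completed reasoning item whose summary holds the accumulated reasoning text.
  return { type: 'reasoning', id, status: 'completed', summary: [{ type: 'summary_text', text }] };
}
function makeFunctionCallItem(id: string, callId: string, name: string, argumentsText: string): Record<string, unknown> {
  // Assumed shape: a completed function call carrying the accumulated arguments string.
  return { type: 'function_call', id, call_id: callId, name, arguments: argumentsText, status: 'completed' };
}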


@@ -349,9 +349,31 @@ test('buildUpstreamOptionsFromChat passes through model tools and reasoning sett
assert.equal(options.reasoning_effort, 'high');
assert.equal(options.verbosity, 'low');
assert.equal(options.reasoning_summary, 'detailed');
assert.equal(options.tool_choice, undefined);
assert.deepEqual(options.tools, [{ type: 'function', function: { name: 'lookup', description: 'd', parameters: { type: 'object' } } }]);
});
test('buildUpstreamOptionsFromChat omits explicit auto tool_choice', () => {
const options = buildUpstreamOptionsFromChat({
tools: [{ type: 'function', function: { name: 'lookup', parameters: { type: 'object' } } }],
tool_choice: 'auto',
});
assert.equal(options.tool_choice, undefined);
assert.equal(options.tools?.[0]?.function.name, 'lookup');
assert.deepEqual(options.tools?.[0]?.function.parameters, { type: 'object' });
});
test('buildUpstreamOptionsFromChat honors tool_choice none', () => {
const options = buildUpstreamOptionsFromChat({
tools: [{ type: 'function', function: { name: 'lookup', parameters: { type: 'object' } } }],
tool_choice: 'none',
});
assert.equal(options.tool_choice, 'none');
assert.equal(options.tools, undefined);
});
test('buildUpstreamOptionsFromResponses passes through model tools and reasoning settings', () => {
const options = buildUpstreamOptionsFromResponses({
model: 'glm-5.1',
@@ -368,9 +390,20 @@ test('buildUpstreamOptionsFromResponses passes through model tools and reasoning
assert.equal(options.reasoning_effort, 'medium');
assert.equal(options.reasoning_summary, 'auto');
assert.equal(options.verbosity, 'high');
assert.equal(options.tool_choice, undefined);
assert.deepEqual(options.tools, [{ type: 'function', function: { name: 'lookup', description: 'd', parameters: { type: 'object' } } }]);
});
test('buildUpstreamOptionsFromResponses honors tool_choice none', () => {
const options = buildUpstreamOptionsFromResponses({
tools: [{ type: 'function', name: 'lookup', parameters: { type: 'object' } }],
tool_choice: 'none',
});
assert.equal(options.tool_choice, 'none');
assert.equal(options.tools, undefined);
});
test('buildUpstreamOptionsFromAnthropic passes through model tools and thinking budget', () => {
const options = buildUpstreamOptionsFromAnthropic({
model: 'hunyuan-2.0-thinking',
@@ -382,9 +415,20 @@ test('buildUpstreamOptionsFromAnthropic passes through model tools and thinking
assert.equal(options.model, 'hunyuan-2.0-thinking');
assert.equal(options.max_tokens, 1024);
assert.equal(options.reasoning_effort, 'high');
assert.equal(options.tool_choice, undefined);
assert.deepEqual(options.tools, [{ type: 'function', function: { name: 'lookup', description: 'd', parameters: { type: 'object' } } }]);
});
test('buildUpstreamOptionsFromAnthropic maps tool_choice none', () => {
const options = buildUpstreamOptionsFromAnthropic({
tools: [{ name: 'lookup', input_schema: { type: 'object' } }],
tool_choice: { type: 'none' },
});
assert.equal(options.tool_choice, 'none');
assert.equal(options.tools, undefined);
});
test('loadModels reads config-backed model registry', () => {
const models = loadModels();
assert.ok(models.some((model) => model.id === 'kimi-k2.6' && model.name === 'Kimi-K2.6' && model.credits_multiplier === 0.59));
@@ -604,6 +648,7 @@ test('collectResponsesStreamEvents emits sub2api-style response events', () => {
assert.ok(events.some((event) => event.type === 'response.reasoning_summary_text.delta'));
assert.ok(events.some((event) => event.type === 'response.output_text.delta'));
assert.equal(events.at(-1)?.type, 'response.completed');
assert.ok(JSON.stringify(events.at(-1)).includes('hello'));
});
test('collectResponsesStreamEvents starts text-only output at index zero', () => {
@@ -617,10 +662,13 @@ test('collectResponsesStreamEvents starts text-only output at index zero', () =>
const added = events.find((event) => event.type === 'response.output_item.added') as { output_index?: number } | undefined;
const delta = events.find((event) => event.type === 'response.output_text.delta') as { output_index?: number } | undefined;
const done = events.find((event) => event.type === 'response.output_text.done') as { output_index?: number } | undefined;
const done = events.find((event) => event.type === 'response.output_text.done') as { output_index?: number; text?: string } | undefined;
const completed = events.at(-1) as { response?: { output?: Array<{ content?: Array<{ text?: string }> }> } };
assert.equal(added?.output_index, 0);
assert.equal(delta?.output_index, 0);
assert.equal(done?.output_index, 0);
assert.equal(done?.text, 'hello');
assert.equal(completed.response?.output?.[0]?.content?.[0]?.text, 'hello');
});
test('collectResponsesStreamEvents emits function call stream events', () => {
@@ -660,10 +708,13 @@ test('collectResponsesStreamEvents starts tool-only output at index zero', () =>
const added = events.find((event) => event.type === 'response.output_item.added') as { output_index?: number } | undefined;
const delta = events.find((event) => event.type === 'response.function_call_arguments.delta') as { output_index?: number } | undefined;
const done = events.find((event) => event.type === 'response.function_call_arguments.done') as { output_index?: number } | undefined;
const done = events.find((event) => event.type === 'response.function_call_arguments.done') as { output_index?: number; arguments?: string } | undefined;
const completed = events.at(-1) as { response?: { output?: Array<{ arguments?: string }> } };
assert.equal(added?.output_index, 0);
assert.equal(delta?.output_index, 0);
assert.equal(done?.output_index, 0);
assert.equal(done?.arguments, '{"city":"Shanghai"}');
assert.equal(completed.response?.output?.[0]?.arguments, '{"city":"Shanghai"}');
});
test('buildAnthropicMessageResponse includes thinking and text blocks', () => {

src/direct-logger.ts (new file, 71 lines)

@@ -0,0 +1,71 @@
type LogLevel = 'debug' | 'info' | 'warn' | 'error' | 'silent';
const levelWeights: Record<LogLevel, number> = {
debug: 10,
info: 20,
warn: 30,
error: 40,
silent: 50,
};
const configuredLevel = normalizeLevel(process.env.DIRECT_LOG_LEVEL);
export function logDebug(message: string, details?: Record<string, unknown>): void {
writeLog('debug', message, details);
}
export function logInfo(message: string, details?: Record<string, unknown>): void {
writeLog('info', message, details);
}
export function logWarn(message: string, details?: Record<string, unknown>): void {
writeLog('warn', message, details);
}
export function logError(message: string, details?: Record<string, unknown>): void {
writeLog('error', message, details);
}
export function errorDetails(error: unknown): Record<string, unknown> {
if (error instanceof Error) {
return {
name: error.name,
message: error.message,
stack: process.env.DIRECT_LOG_STACK === '1' ? error.stack : undefined,
};
}
return { message: String(error) };
}
function writeLog(level: Exclude<LogLevel, 'silent'>, message: string, details?: Record<string, unknown>): void {
if (levelWeights[level] < levelWeights[configuredLevel]) return;
const entry = {
timestamp: new Date().toISOString(),
level,
message,
...dropUndefined(details),
};
const line = JSON.stringify(entry);
if (level === 'error') {
console.error(line);
return;
}
if (level === 'warn') {
console.warn(line);
return;
}
console.log(line);
}
function normalizeLevel(value: string | undefined): LogLevel {
const normalized = value?.trim().toLowerCase();
if (normalized === 'debug' || normalized === 'info' || normalized === 'warn' || normalized === 'error' || normalized === 'silent') {
return normalized;
}
return 'info';
}
function dropUndefined(details: Record<string, unknown> | undefined): Record<string, unknown> {
if (!details) return {};
return Object.fromEntries(Object.entries(details).filter(([, value]) => value !== undefined));
}
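A short usage sketch for the new logger; the request ID and values are illustrative:

import { errorDetails, logDebug, logError, logInfo } from './direct-logger';

// With the default DIRECT_LOG_LEVEL=info, logDebug entries are filtered out; set DIRECT_LOG_LEVEL=debug to see them.
logDebug('chat request converted', { requestId: 'abc123', tools: 2 });
logInfo('upstream stream opened', { status: 200, durationMs: 412 });
// Example output (one JSON line per entry, values illustrative):
// {"timestamp":"2026-04-23T13:13:08.000Z","level":"info","message":"upstream stream opened","status":200,"durationMs":412}

try {
  throw new Error('boom');
} catch (error) {
  // errorDetails includes the stack only when DIRECT_LOG_STACK=1; undefined fields are dropped.
  logError('request failed', { requestId: 'abc123', ...errorDetails(error) });
}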


@@ -33,9 +33,12 @@ export function buildUpstreamMessagesFromChat(
}
export function buildUpstreamOptionsFromChat(request: OpenAIChatRequestLike): DirectUpstreamOptions {
const tools = normalizeChatTools(request.tools);
const toolChoice = normalizeToolChoice(request.tool_choice, tools);
return {
model: request.model,
tools: normalizeChatTools(request.tools),
tools: toolChoice === 'none' ? undefined : tools,
tool_choice: toolChoice,
temperature: request.temperature,
max_tokens: request.max_tokens,
reasoning_effort: request.reasoning_effort,
@@ -62,9 +65,12 @@ export function buildUpstreamMessagesFromResponses(
}
export function buildUpstreamOptionsFromResponses(request: ResponsesRequestLike): DirectUpstreamOptions {
const tools = normalizeResponsesTools(request.tools);
const toolChoice = normalizeToolChoice(request.tool_choice, tools);
return {
model: request.model,
tools: normalizeResponsesTools(request.tools),
tools: toolChoice === 'none' ? undefined : tools,
tool_choice: toolChoice,
temperature: request.temperature,
max_tokens: request.max_output_tokens,
reasoning_effort: request.reasoning?.effort,
@@ -89,9 +95,12 @@ export function buildUpstreamMessagesFromAnthropic(
}
export function buildUpstreamOptionsFromAnthropic(request: AnthropicMessagesRequestLike): DirectUpstreamOptions {
const tools = normalizeAnthropicTools(request.tools);
const toolChoice = normalizeToolChoice(request.tool_choice, tools);
return {
model: request.model,
tools: normalizeAnthropicTools(request.tools),
tools: toolChoice === 'none' ? undefined : tools,
tool_choice: toolChoice,
max_tokens: request.max_tokens,
reasoning_effort: request.thinking?.type === 'enabled' ? 'high' : undefined,
};
@@ -224,6 +233,31 @@ function normalizeAnthropicTools(tools: AnthropicMessagesRequestLike['tools']):
return normalized.length ? normalized : undefined;
}
function normalizeToolChoice(toolChoice: unknown, tools: DirectTool[] | undefined): unknown {
if (!tools?.length) return undefined;
if (typeof toolChoice === 'string') {
if (toolChoice === 'none' || toolChoice === 'required') return toolChoice;
if (toolChoice === 'auto') return undefined;
return undefined;
}
if (!toolChoice || typeof toolChoice !== 'object') return undefined;
const choice = toolChoice as {
type?: string;
name?: string;
function?: { name?: string };
};
if (choice.type === 'none') return 'none';
if (choice.type === 'auto') return undefined;
if (choice.type === 'any') return 'required';
if (choice.type === 'required') return 'required';
if (choice.type === 'function') return toolChoice;
if (choice.type === 'tool' && choice.name) {
return { type: 'function', function: { name: choice.name } };
}
return undefined;
}
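Illustrative calls through the public builders, mirroring the normalizeToolChoice mapping and the tests added in this commit:

import { buildUpstreamOptionsFromAnthropic, buildUpstreamOptionsFromChat } from './direct-request-builders';

// Anthropic-style { type: 'tool', name } is mapped to the OpenAI function form.
const picked = buildUpstreamOptionsFromAnthropic({
  tools: [{ name: 'lookup', input_schema: { type: 'object' } }],
  tool_choice: { type: 'tool', name: 'lookup' },
});
// picked.tool_choice is { type: 'function', function: { name: 'lookup' } }; tools are kept.

// 'auto' is treated as the default and dropped; 'none' is forwarded and the tools are removed entirely.
const silenced = buildUpstreamOptionsFromChat({
  tools: [{ type: 'function', function: { name: 'lookup', parameters: { type: 'object' } } }],
  tool_choice: 'none',
});
// silenced.tool_choice is 'none' and silenced.tools is undefined.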
function anthropicContentToUpstream(
role: string | undefined,
content: unknown,


@@ -25,6 +25,7 @@ import {
buildUpstreamOptionsFromResponses,
} from './direct-request-builders';
import { loadCliUserContextBlocks, loadModels, loadSystemPrompt, loadSystemPromptMode } from './direct-config';
import { errorDetails, logDebug, logError, logInfo, logWarn } from './direct-logger';
import { streamDirectCanonicalEvents } from './direct-upstream';
import type {
AnthropicMessagesRequestLike,
@@ -43,19 +44,33 @@ const maxRequestBytes = Number(process.env.DIRECT_MAX_REQUEST_BYTES ?? 1024 * 10
const server = createServer(async (req, res) => {
const url = new URL(req.url ?? '/', `http://${req.headers.host ?? '127.0.0.1'}`);
const requestId = `${Date.now().toString(36)}-${Math.random().toString(36).slice(2, 8)}`;
const startedAt = Date.now();
logInfo('request received', {
requestId,
method: req.method,
path: url.pathname,
contentLength: req.headers['content-length'],
userAgent: req.headers['user-agent'],
});
try {
if (req.method === 'GET' && url.pathname === '/health') {
return writeJson(res, 200, { ok: true, mode: 'direct' });
writeJson(res, 200, { ok: true, mode: 'direct' });
logRequestFinished(requestId, req, res, startedAt);
return;
}
if (!authorize(req)) {
logWarn('request unauthorized', { requestId, path: url.pathname });
writeEndpointError(res, url.pathname, 401, 'Unauthorized');
logRequestFinished(requestId, req, res, startedAt);
return;
}
if (req.method === 'GET' && url.pathname === '/v1/models') {
return writeJson(res, 200, {
writeJson(res, 200, {
object: 'list',
data: models.map((model) => ({
id: model.id,
@@ -65,43 +80,85 @@ const server = createServer(async (req, res) => {
credits_multiplier: model.credits_multiplier,
})),
});
logRequestFinished(requestId, req, res, startedAt);
return;
}
if (req.method === 'POST' && url.pathname === '/v1/chat/completions') {
await handleChat(req, res);
await handleChat(req, res, requestId);
logRequestFinished(requestId, req, res, startedAt);
return;
}
if (req.method === 'POST' && url.pathname === '/v1/responses') {
await handleResponses(req, res);
await handleResponses(req, res, requestId);
logRequestFinished(requestId, req, res, startedAt);
return;
}
if (req.method === 'POST' && url.pathname === '/v1/messages') {
await handleMessages(req, res);
await handleMessages(req, res, requestId);
logRequestFinished(requestId, req, res, startedAt);
return;
}
writeJson(res, 404, { error: { message: 'Not found' } });
logRequestFinished(requestId, req, res, startedAt);
} catch (error) {
logError('request failed', {
requestId,
method: req.method,
path: url.pathname,
...errorDetails(error),
});
if (!res.headersSent) {
if (error instanceof RequestBodyError) {
writeEndpointError(res, url.pathname, error.statusCode, error.message);
logRequestFinished(requestId, req, res, startedAt);
return;
}
writeJson(res, 502, { error: { message: 'Upstream request failed' } });
} else {
res.end();
}
logRequestFinished(requestId, req, res, startedAt);
}
});
server.listen(port, '127.0.0.1', () => {
console.log(`codebuddy direct service listening on http://127.0.0.1:${port}`);
logInfo('codebuddy direct service listening', {
url: `http://127.0.0.1:${port}`,
proxyAuthEnabled: Boolean(proxyApiKey),
systemPromptMode,
systemPromptChars: systemPrompt.length,
cliUserContextBlocks: cliUserContextBlocks.length,
models: models.length,
maxRequestBytes,
});
});
async function handleChat(req: IncomingMessage, res: ServerResponse): Promise<void> {
server.on('clientError', (error, socket) => {
logWarn('client connection error', errorDetails(error));
socket.end('HTTP/1.1 400 Bad Request\r\n\r\n');
});
process.on('uncaughtException', (error) => {
logError('uncaught exception', errorDetails(error));
});
process.on('unhandledRejection', (reason) => {
logError('unhandled rejection', errorDetails(reason));
});
async function handleChat(req: IncomingMessage, res: ServerResponse, requestId: string): Promise<void> {
const body = await readJson<OpenAIChatRequestLike>(req);
const metadata = { id: `chatcmpl_${Date.now()}`, model: body.model ?? (process.env.CODEBUDDY_MODEL ?? 'minimax-m2.7'), created: Math.floor(Date.now() / 1000) };
const upstreamMessages = buildUpstreamMessagesFromChat(body, { systemPrompt, systemPromptMode, cliUserContextBlocks });
const upstreamOptions = buildUpstreamOptionsFromChat(body);
logDebug('chat request converted', {
requestId,
model: metadata.model,
stream: Boolean(body.stream),
upstreamMessages: summarizeMessages(upstreamMessages),
tools: upstreamOptions.tools?.length ?? 0,
});
if (body.stream) {
writeSseHeaders(res);
@@ -120,11 +177,18 @@ async function handleChat(req: IncomingMessage, res: ServerResponse): Promise<vo
writeJson(res, 200, buildChatCompletionResponse(state));
}
async function handleResponses(req: IncomingMessage, res: ServerResponse): Promise<void> {
async function handleResponses(req: IncomingMessage, res: ServerResponse, requestId: string): Promise<void> {
const body = await readJson<ResponsesRequestLike>(req);
const metadata = { id: `resp_${Date.now()}`, model: body.model ?? (process.env.CODEBUDDY_MODEL ?? 'minimax-m2.7'), created: Math.floor(Date.now() / 1000) };
const upstreamMessages = buildUpstreamMessagesFromResponses(body, { systemPrompt, systemPromptMode, cliUserContextBlocks });
const upstreamOptions = buildUpstreamOptionsFromResponses(body);
logDebug('responses request converted', {
requestId,
model: metadata.model,
stream: Boolean(body.stream),
upstreamMessages: summarizeMessages(upstreamMessages),
tools: upstreamOptions.tools?.length ?? 0,
});
if (body.stream) {
writeSseHeaders(res);
@@ -149,11 +213,18 @@ async function handleResponses(req: IncomingMessage, res: ServerResponse): Promi
writeJson(res, 200, buildResponsesResponse(state));
}
async function handleMessages(req: IncomingMessage, res: ServerResponse): Promise<void> {
async function handleMessages(req: IncomingMessage, res: ServerResponse, requestId: string): Promise<void> {
const body = await readJson<AnthropicMessagesRequestLike>(req);
const metadata = { id: `msg_${Date.now()}`, model: body.model ?? (process.env.CODEBUDDY_MODEL ?? 'minimax-m2.7'), created: Math.floor(Date.now() / 1000) };
const upstreamMessages = buildUpstreamMessagesFromAnthropic(body, { systemPrompt, systemPromptMode, cliUserContextBlocks });
const upstreamOptions = buildUpstreamOptionsFromAnthropic(body);
logDebug('messages request converted', {
requestId,
model: metadata.model,
stream: Boolean(body.stream),
upstreamMessages: summarizeMessages(upstreamMessages),
tools: upstreamOptions.tools?.length ?? 0,
});
if (body.stream) {
writeSseHeaders(res);
@@ -249,6 +320,34 @@ function writeEndpointError(res: ServerResponse, endpoint: string, statusCode: n
writeJson(res, statusCode, { error: { message } });
}
function logRequestFinished(
requestId: string,
req: IncomingMessage,
res: ServerResponse,
startedAt: number,
): void {
logInfo('request finished', {
requestId,
method: req.method,
path: req.url?.split('?')[0],
statusCode: res.statusCode,
durationMs: Date.now() - startedAt,
});
}
function summarizeMessages(messages: ReturnType<typeof buildUpstreamMessagesFromChat>): Array<Record<string, unknown>> {
return messages.map((message) => ({
role: message.role,
agent: message.agent,
contentChars: typeof message.content === 'string'
? message.content.length
: message.content.reduce((total, block) => total + block.text.length, 0),
contentBlocks: Array.isArray(message.content) ? message.content.length : undefined,
toolCalls: message.tool_calls?.length ?? 0,
toolCallId: message.tool_call_id,
}));
}
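To show what summarizeMessages feeds into the debug entries above, a converted request with one tool round-trip might summarize roughly like this (all values illustrative):

const exampleSummary = [
  { role: 'user', agent: undefined, contentChars: 28, contentBlocks: undefined, toolCalls: 0, toolCallId: undefined },
  { role: 'assistant', agent: undefined, contentChars: 0, contentBlocks: undefined, toolCalls: 1, toolCallId: undefined },
  { role: 'tool', agent: undefined, contentChars: 19, contentBlocks: undefined, toolCalls: 0, toolCallId: 'call_1' },
];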
class RequestBodyError extends Error {
constructor(
readonly statusCode: number,


@@ -82,6 +82,7 @@ export type DirectBuilderContext = {
export type DirectUpstreamOptions = {
model?: string;
tools?: DirectTool[];
tool_choice?: unknown;
temperature?: number;
max_tokens?: number;
reasoning_effort?: string;
@@ -109,6 +110,7 @@ export type OpenAIChatRequestLike = {
model?: string;
messages?: OpenAIChatInputMessage[];
tools?: OpenAIChatToolLike[];
tool_choice?: unknown;
temperature?: number;
max_tokens?: number;
reasoning_effort?: string;
@@ -144,6 +146,7 @@ export type ResponsesRequestLike = {
instructions?: unknown;
input?: string | ResponsesInputMessage[];
tools?: ResponsesToolLike[];
tool_choice?: unknown;
temperature?: number;
max_output_tokens?: number;
reasoning?: {
@@ -182,6 +185,7 @@ export type AnthropicMessagesRequestLike = {
system?: unknown;
messages?: AnthropicMessageLike[];
tools?: AnthropicToolLike[];
tool_choice?: unknown;
max_tokens?: number;
thinking?: {
type?: string;

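All three request shapes gain the optional tool_choice field; a minimal ResponsesRequestLike carrying it could look like this (values illustrative):

const request: ResponsesRequestLike = {
  model: 'glm-5.1',
  input: 'look up the weather in Shanghai',
  tools: [{ type: 'function', name: 'lookup', parameters: { type: 'object' } }],
  tool_choice: 'none',
  max_output_tokens: 256,
};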

@@ -1,6 +1,7 @@
import { randomBytes, randomUUID } from 'node:crypto';
import { gzipSync } from 'node:zlib';
import { loadApiKey, loadCliUserContextBlocks, loadSystemPrompt } from './direct-config';
import { logDebug, logError, logInfo } from './direct-logger';
import type { DirectUpstreamMessage, DirectUpstreamOptions, UpstreamChunk } from './direct-types';
import { parseUpstreamChunk, type CanonicalEvent } from './direct-canonical';
@@ -45,6 +46,7 @@ export function buildDirectRequestBody(messages: DirectUpstreamMessage[], option
model: options.model ?? defaultModel,
messages,
tools: options.tools,
tool_choice: options.tool_choice,
stream: true,
stream_options: { include_usage: true },
temperature: options.temperature ?? Number(process.env.CODEBUDDY_TEMPERATURE ?? 1),
@@ -59,6 +61,15 @@ export async function* streamDirectCanonicalEvents(
messages: DirectUpstreamMessage[],
options: DirectUpstreamOptions = {},
): AsyncGenerator<CanonicalEvent> {
const startedAt = Date.now();
logInfo('upstream request started', {
endpoint,
model: options.model ?? defaultModel,
messages: messages.length,
tools: options.tools?.length ?? 0,
stream: true,
});
const response = await fetch(endpoint, {
method: 'POST',
headers: buildDirectHeaders(),
@@ -66,13 +77,28 @@ export async function* streamDirectCanonicalEvents(
});
if (!response.ok || !response.body) {
throw new Error(`HTTP ${response.status}: ${await response.text()}`);
const responseText = await response.text();
logError('upstream request failed', {
status: response.status,
statusText: response.statusText,
durationMs: Date.now() - startedAt,
bodyPreview: responseText.slice(0, 1000),
});
throw new Error(`HTTP ${response.status}: ${responseText}`);
}
logInfo('upstream stream opened', {
status: response.status,
durationMs: Date.now() - startedAt,
});
const decoder = new TextDecoder();
let buffer = '';
let chunkCount = 0;
let eventCount = 0;
for await (const chunk of response.body) {
chunkCount += 1;
buffer += decoder.decode(chunk, { stream: true });
const lines = buffer.split(/\r?\n/);
buffer = lines.pop() ?? '';
@@ -84,11 +110,22 @@ export async function* streamDirectCanonicalEvents(
try {
const parsed = JSON.parse(data) as UpstreamChunk;
for (const event of parseUpstreamChunk(parsed)) {
eventCount += 1;
yield event;
}
} catch {
} catch (error) {
logDebug('ignored malformed upstream stream data', {
error: error instanceof Error ? error.message : String(error),
dataPreview: data.slice(0, 300),
});
continue;
}
}
}
logInfo('upstream stream finished', {
durationMs: Date.now() - startedAt,
chunks: chunkCount,
events: eventCount,
});
}
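End to end, the pieces above compose roughly as follows; the encoder's per-event method and its module path are not shown in this commit, so encode() and the import path are assumptions:

import { ResponsesStreamEncoder } from './direct-responses-encoder'; // module path is a guess
import { streamDirectCanonicalEvents } from './direct-upstream';

async function runDirectExample(): Promise<void> {
  const metadata = { id: `resp_${Date.now()}`, model: 'minimax-m2.7', created: Math.floor(Date.now() / 1000) };
  const encoder = new ResponsesStreamEncoder(metadata);
  for await (const event of streamDirectCanonicalEvents([{ role: 'user', content: 'hello' }], { model: 'minimax-m2.7' })) {
    // The encoder now accumulates reasoning, message and tool-argument text, so the final
    // response.completed event carries fully populated output items instead of an empty array.
    for (const frame of encoder.encode(event)) { // method name is an assumption
      console.log(JSON.stringify(frame));
    }
  }
}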