import { randomBytes, randomUUID } from 'node:crypto';
import { gzipSync } from 'node:zlib';
import { loadApiKey, loadCliUserContextBlocks, loadSystemPrompt } from './direct-config';
import { logDebug, logError, logInfo } from './direct-logger';
import type { DirectUpstreamMessage, DirectUpstreamOptions, UpstreamChunk } from './direct-types';
import { parseUpstreamChunk, type CanonicalEvent } from './direct-canonical';

// Upstream chat-completions endpoint and default model; both overridable via env.
const endpoint = process.env.CODEBUDDY_DIRECT_ENDPOINT ?? 'https://copilot.tencent.com/v2/chat/completions';
const defaultModel = process.env.CODEBUDDY_MODEL ?? 'minimax-m2.7';

export { loadApiKey, loadCliUserContextBlocks, loadSystemPrompt } from './direct-config';

/**
 * Builds the full header set for one upstream request.
 *
 * Every call mints fresh trace/span/conversation identifiers so each request
 * is independently traceable. `content-encoding: gzip` is fixed because the
 * payload from {@link buildDirectRequestBody} is always gzip-compressed.
 *
 * @param apiKey - Bearer token; defaults to the key loaded from config.
 * @returns Plain header map suitable for passing to `fetch`.
 */
export function buildDirectHeaders(apiKey = loadApiKey()): Record<string, string> {
  // B3-style trace context: 16-byte trace id, 8-byte span id.
  const traceId = randomBytes(16).toString('hex');
  const spanId = randomBytes(8).toString('hex');
  return {
    'content-type': 'application/json',
    'content-encoding': 'gzip',
    accept: 'application/json',
    'x-requested-with': 'XMLHttpRequest',
    authorization: `Bearer ${apiKey}`,
    'x-api-key': apiKey,
    // Fresh per-request conversation identifiers.
    'x-conversation-id': randomUUID(),
    'x-conversation-request-id': randomBytes(16).toString('hex'),
    'x-conversation-message-id': randomBytes(16).toString('hex'),
    'x-agent-intent': 'craft',
    'x-ide-type': 'CLI',
    'x-ide-name': 'CLI',
    'x-ide-version': '2.93.3',
    'user-agent': process.env.CODEBUDDY_USER_AGENT ?? 'CLI/2.93.3 CodeBuddy/2.93.3 CodeBuddy Agent SDK/0.3.28 (Node.js/25.2.1) CodeBuddy Code/2.93.3',
    'x-trace-id': traceId,
    'x-request-id': traceId,
    // b3 single-header format: {traceId}-{spanId}-{sampled}-{parentSpanId?}.
    b3: `${traceId}-${spanId}-1-`,
    'x-b3-traceid': traceId,
    'x-b3-parentspanid': '',
    'x-b3-spanid': spanId,
    'x-b3-sampled': '1',
    'x-codebuddy-request': '1',
    'x-user-id': `anonymous_${apiKey.slice(-8)}`,
    'x-product': 'SaaS',
  };
}

/**
 * Serializes and gzip-compresses the chat-completions request payload.
 *
 * Each tunable resolves in priority order: explicit option, then environment
 * variable, then hard-coded default. Streaming is always on, with usage
 * reporting requested via `stream_options`.
 *
 * @param messages - Conversation history to send upstream.
 * @param options - Per-request overrides for model and sampling parameters.
 * @returns Gzip-compressed JSON body ready for `fetch`.
 */
export function buildDirectRequestBody(messages: DirectUpstreamMessage[], options: DirectUpstreamOptions = {}): Buffer {
  return gzipSync(JSON.stringify({
    model: options.model ?? defaultModel,
    messages,
    tools: options.tools,
    tool_choice: options.tool_choice,
    stream: true,
    stream_options: { include_usage: true },
    temperature: options.temperature ?? Number(process.env.CODEBUDDY_TEMPERATURE ?? 1),
    max_tokens: options.max_tokens ?? Number(process.env.CODEBUDDY_MAX_TOKENS ?? 48000),
    reasoning_effort: options.reasoning_effort ?? process.env.CODEBUDDY_REASONING_EFFORT ?? 'medium',
    verbosity: options.verbosity ?? process.env.CODEBUDDY_VERBOSITY ?? 'high',
    reasoning_summary: options.reasoning_summary ?? process.env.CODEBUDDY_REASONING_SUMMARY ?? 'auto',
  }));
}

// Parses one SSE line: yields canonical events for a JSON `data:` payload;
// yields nothing for non-data lines, keep-alives, `[DONE]` markers, or
// malformed JSON (logged at debug level, never fatal to the stream).
function* parseSseLine(line: string): Generator<CanonicalEvent> {
  if (!line.startsWith('data:')) return;
  const data = line.slice(5).trim();
  if (!data || data === '[DONE]') return;
  try {
    const parsed = JSON.parse(data) as UpstreamChunk;
    yield* parseUpstreamChunk(parsed);
  } catch (error) {
    logDebug('ignored malformed upstream stream data', {
      error: error instanceof Error ? error.message : String(error),
      dataPreview: data.slice(0, 300),
    });
  }
}

/**
 * Sends the conversation upstream and streams back canonical events parsed
 * from the server-sent-event response.
 *
 * The raw byte stream is decoded incrementally, split on CRLF or LF, and each
 * `data:` line is parsed via {@link parseSseLine}. A partial line is carried
 * across chunks in `buffer`; after the stream ends the decoder is flushed and
 * any final unterminated line is still processed.
 *
 * @param messages - Conversation history to send.
 * @param options - Per-request overrides (model, tools, sampling).
 * @throws Error prefixed with `HTTP <status>` when the response is not ok or
 *   has no body; the response text is included for diagnosis.
 */
export async function* streamDirectCanonicalEvents(
  messages: DirectUpstreamMessage[],
  options: DirectUpstreamOptions = {},
): AsyncGenerator<CanonicalEvent> {
  const startedAt = Date.now();
  logInfo('upstream request started', {
    endpoint,
    model: options.model ?? defaultModel,
    messages: messages.length,
    tools: options.tools?.length ?? 0,
    stream: true,
  });
  const response = await fetch(endpoint, {
    method: 'POST',
    headers: buildDirectHeaders(),
    body: buildDirectRequestBody(messages, options),
  });
  if (!response.ok || !response.body) {
    const responseText = await response.text();
    logError('upstream request failed', {
      status: response.status,
      statusText: response.statusText,
      durationMs: Date.now() - startedAt,
      bodyPreview: responseText.slice(0, 1000),
    });
    throw new Error(`HTTP ${response.status}: ${responseText}`);
  }
  logInfo('upstream stream opened', {
    status: response.status,
    durationMs: Date.now() - startedAt,
  });
  const decoder = new TextDecoder();
  let buffer = '';
  let chunkCount = 0;
  let eventCount = 0;
  for await (const chunk of response.body) {
    chunkCount += 1;
    buffer += decoder.decode(chunk, { stream: true });
    const lines = buffer.split(/\r?\n/);
    // Keep the trailing partial line for the next chunk.
    buffer = lines.pop() ?? '';
    for (const line of lines) {
      for (const event of parseSseLine(line)) {
        eventCount += 1;
        yield event;
      }
    }
  }
  // Flush any multi-byte sequence held by the decoder, then process a final
  // data line that lacked a trailing newline (previously silently dropped).
  buffer += decoder.decode();
  if (buffer) {
    for (const event of parseSseLine(buffer)) {
      eventCount += 1;
      yield event;
    }
  }
  logInfo('upstream stream finished', {
    durationMs: Date.now() - startedAt,
    chunks: chunkCount,
    events: eventCount,
  });
}