首次转发成功

This commit is contained in:
2026-04-23 20:50:35 +08:00
parent a1587b8d12
commit 3e6fe3a6a1
20 changed files with 2884 additions and 4 deletions

94
src/direct-upstream.ts Normal file
View File

@@ -0,0 +1,94 @@
import { randomBytes, randomUUID } from 'node:crypto';
import { gzipSync } from 'node:zlib';
import { loadApiKey, loadCliUserContextBlocks, loadSystemPrompt } from './direct-config';
import type { DirectUpstreamMessage, DirectUpstreamOptions, UpstreamChunk } from './direct-types';
import { parseUpstreamChunk, type CanonicalEvent } from './direct-canonical';
// Upstream chat-completions URL; overridable via env for proxying/testing.
const endpoint = process.env.CODEBUDDY_DIRECT_ENDPOINT ?? 'https://copilot.tencent.com/v2/chat/completions';
// Model used when the caller's options do not specify one.
const defaultModel = process.env.CODEBUDDY_MODEL ?? 'minimax-m2.7';
// Re-export config loaders so consumers can import everything from this module.
export { loadApiKey, loadCliUserContextBlocks, loadSystemPrompt } from './direct-config';
/**
 * Assembles the full header set for one direct upstream request.
 *
 * Every call mints fresh trace/span identifiers and a fresh conversation id,
 * so each request is traced independently.
 *
 * @param apiKey - Bearer token for the upstream; defaults to the configured key.
 * @returns A plain header map ready to pass to `fetch`.
 */
export function buildDirectHeaders(apiKey = loadApiKey()): Record<string, string> {
  // 16-byte trace id and 8-byte span id, hex-encoded (B3 propagation format).
  const trace = randomBytes(16).toString('hex');
  const span = randomBytes(8).toString('hex');

  const headers: Record<string, string> = {
    'content-type': 'application/json',
    'content-encoding': 'gzip',
    accept: 'application/json',
    'x-requested-with': 'XMLHttpRequest',
    authorization: `Bearer ${apiKey}`,
    'x-api-key': apiKey,
    // Per-request conversation/message identity.
    'x-conversation-id': randomUUID(),
    'x-conversation-request-id': randomBytes(16).toString('hex'),
    'x-conversation-message-id': randomBytes(16).toString('hex'),
    'x-agent-intent': 'craft',
    'x-ide-type': 'CLI',
    'x-ide-name': 'CLI',
    'x-ide-version': '2.93.3',
    'user-agent': process.env.CODEBUDDY_USER_AGENT ?? 'CLI/2.93.3 CodeBuddy/2.93.3 CodeBuddy Agent SDK/0.3.28 (Node.js/25.2.1) CodeBuddy Code/2.93.3',
    // Trace id doubles as the request id; B3 headers mirror the same values.
    'x-trace-id': trace,
    'x-request-id': trace,
    b3: `${trace}-${span}-1-`,
    'x-b3-traceid': trace,
    'x-b3-parentspanid': '',
    'x-b3-spanid': span,
    'x-b3-sampled': '1',
    'x-codebuddy-request': '1',
    // Anonymous user id derived from the tail of the api key.
    'x-user-id': `anonymous_${apiKey.slice(-8)}`,
    'x-product': 'SaaS',
  };
  return headers;
}
/**
 * Serializes one upstream chat request to a gzip-compressed JSON body.
 *
 * Streaming is always enabled (with usage reporting); tuning knobs fall back
 * first to `options`, then to environment variables, then to fixed defaults.
 *
 * @param messages - Conversation messages to send upstream.
 * @param options - Optional per-request overrides (model, tools, sampling knobs).
 * @returns A gzipped Buffer suitable as a `fetch` request body.
 */
export function buildDirectRequestBody(messages: DirectUpstreamMessage[], options: DirectUpstreamOptions = {}) {
  const payload = {
    model: options.model ?? defaultModel,
    messages,
    tools: options.tools,
    stream: true,
    stream_options: { include_usage: true },
    temperature: options.temperature ?? Number(process.env.CODEBUDDY_TEMPERATURE ?? 1),
    max_tokens: options.max_tokens ?? Number(process.env.CODEBUDDY_MAX_TOKENS ?? 48000),
    reasoning_effort: options.reasoning_effort ?? process.env.CODEBUDDY_REASONING_EFFORT ?? 'medium',
    verbosity: options.verbosity ?? process.env.CODEBUDDY_VERBOSITY ?? 'high',
    reasoning_summary: options.reasoning_summary ?? process.env.CODEBUDDY_REASONING_SUMMARY ?? 'auto',
  };
  return gzipSync(JSON.stringify(payload));
}
/**
 * Streams canonical events from the upstream SSE endpoint.
 *
 * Sends the gzipped request, then incrementally splits the response into SSE
 * lines, parses each `data:` payload as an UpstreamChunk, and yields the
 * canonical events it produces.
 *
 * @param messages - Conversation messages to send upstream.
 * @param options - Optional per-request overrides forwarded to the body builder.
 * @throws Error when the HTTP response is non-2xx or has no body.
 */
export async function* streamDirectCanonicalEvents(
  messages: DirectUpstreamMessage[],
  options: DirectUpstreamOptions = {},
): AsyncGenerator<CanonicalEvent> {
  const response = await fetch(endpoint, {
    method: 'POST',
    headers: buildDirectHeaders(),
    body: buildDirectRequestBody(messages, options),
  });
  if (!response.ok || !response.body) {
    throw new Error(`HTTP ${response.status}: ${await response.text()}`);
  }
  // Parses one SSE line; yields any canonical events contained in it.
  function* parseLine(line: string): Generator<CanonicalEvent> {
    if (!line.startsWith('data:')) return;
    const data = line.slice(5).trim();
    if (!data || data === '[DONE]') return;
    let parsed: UpstreamChunk;
    try {
      parsed = JSON.parse(data) as UpstreamChunk;
    } catch {
      // Malformed or partial frame — skip it, matching the original
      // best-effort behavior.
      return;
    }
    yield* parseUpstreamChunk(parsed);
  }
  const decoder = new TextDecoder();
  let buffer = '';
  for await (const chunk of response.body) {
    buffer += decoder.decode(chunk, { stream: true });
    // Keep the trailing fragment (after the last newline) for the next chunk.
    const lines = buffer.split(/\r?\n/);
    buffer = lines.pop() ?? '';
    for (const line of lines) {
      yield* parseLine(line);
    }
  }
  // Fix: flush the decoder and process a final `data:` line that arrived
  // without a trailing newline — previously it was silently dropped.
  buffer += decoder.decode();
  if (buffer) {
    yield* parseLine(buffer);
  }
}