官方sdk测试

This commit is contained in:
2026-04-23 03:11:00 +08:00
commit 5bc69bcd5b
13 changed files with 1987 additions and 0 deletions

32
.gitignore vendored Normal file
View File

@@ -0,0 +1,32 @@
# Dependencies
node_modules/
# Build output
dist/
*.tsbuildinfo
# Environment variables
.env
.env.*
# Logs
logs/
*.log
npm-debug.log*
# IDE
.vscode/
.idea/
*.swp
*.swo
# OS
.DS_Store
Thumbs.db
# Sensitive files
# NOTE(review): live API keys were committed in `apikey` before this entry
# took effect — ignore the file going forward and rotate the leaked keys.
apikey
# CodeBuddy
.codex
.codebuddy-accounts/

4
apikey Normal file
View File

@@ -0,0 +1,4 @@
ck_fjvu1ygb83r4.ldwWDa9Q6RP-Ntjih7_lSWFO0XuNWDMkJE6epPmMYtM
ck_fjvu2i7nq58g.rLkYuYkOAnlCv4Ym2zYqJZ9IH4yNMg5LTwAVy2mI6KE
ck_fjvu794w7zls.OAPfiIY9PSKEgT512lp-guT8SIFPB6FqJCEFc9D18b4
ck_fjvu7kqkrocg.0T3gTbRESr126rwbASeaaTb9_eWvllluDL_b3IRAspE

1198
package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

16
package.json Normal file
View File

@@ -0,0 +1,16 @@
{
"name": "codebuddy2api",
"version": "0.1.0",
"private": true,
"scripts": {
"build": "tsc -p tsconfig.json",
"start": "node dist/server.js"
},
"dependencies": {
"@tencent-ai/agent-sdk": "0.3.136"
},
"devDependencies": {
"@types/node": "^20.19.0",
"typescript": "^5.9.3"
}
}

Binary file not shown.

159
scripts/mitm-redact.py Normal file
View File

@@ -0,0 +1,159 @@
import json
import os
import time
from mitmproxy import http
# Destination for the redacted JSONL event log; override with MITM_REDACT_LOG.
OUT = os.environ.get("MITM_REDACT_LOG", "/tmp/codebuddy-mitm-events.jsonl")
# Lower-cased header / JSON key names whose values are always replaced with
# "<redacted>". Substring checks for "token"/"secret"/"key" are applied on
# top of this set (see sanitize_headers and sanitize_json).
SENSITIVE_KEYS = {
    "authorization",
    "proxy-authorization",
    "cookie",
    "set-cookie",
    "x-api-key",
    "api-key",
    "apikey",
    "token",
    "access_token",
    "refresh_token",
    "id_token",
    "codebuddy_api_key",
    "codebuddy_auth_token",
}
def request(flow: http.HTTPFlow) -> None:
    """Stamp the flow with a start time; response() uses it for duration_ms."""
    flow.metadata["started_at"] = time.time()
def response(flow: http.HTTPFlow) -> None:
    """Append a redacted summary of a completed request/response exchange."""
    request_part = flow.request
    response_part = flow.response
    started = flow.metadata.get("started_at", time.time())
    event = {
        "ts": time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()),
        "duration_ms": round((time.time() - started) * 1000),
        "method": request_part.method,
        "scheme": request_part.scheme,
        "host": request_part.pretty_host,
        "port": request_part.port,
        # Query values are dropped; only the bare path is logged.
        "path": request_part.path.split("?")[0],
        "query_keys": sorted(request_part.query.keys()),
        "request_headers": sanitize_headers(request_part.headers),
        "request_body": summarize_body(request_part.headers.get("content-type", ""), safe_content(request_part)),
        "status_code": response_part.status_code,
        "response_headers": sanitize_headers(response_part.headers),
        "response_body": summarize_body(response_part.headers.get("content-type", ""), safe_content(response_part)),
    }
    append(event)
def error(flow: http.HTTPFlow) -> None:
    """Append a redacted summary for a flow that failed before completing."""
    request_part = flow.request
    failure = str(flow.error) if flow.error else "unknown"
    event = {
        "ts": time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()),
        "method": request_part.method,
        "scheme": request_part.scheme,
        "host": request_part.pretty_host,
        "port": request_part.port,
        "path": request_part.path.split("?")[0],
        "error": failure,
    }
    append(event)
def sanitize_headers(headers) -> dict:
    """Copy headers, replacing any sensitive-looking value with a marker."""
    sanitized = {}
    for name, value in headers.items():
        folded = name.lower()
        sensitive = (
            folded in SENSITIVE_KEYS
            or "token" in folded
            or "secret" in folded
            or "key" in folded
        )
        sanitized[name] = "<redacted>" if sensitive else trim(value)
    return sanitized
def safe_content(message):
    """Best-effort body accessor.

    Falls back to the untouched ``raw_content`` when reading ``content``
    raises (e.g. a body that cannot be decoded).
    """
    try:
        body = message.content
    except Exception:
        body = message.raw_content
    return body
def summarize_body(content_type: str, raw: bytes | None):
    """Summarize a message body without logging its full contents."""
    if not raw:
        return {"bytes": 0}
    size = len(raw)
    # Skip decoding huge payloads entirely.
    if size > 2_000_000:
        return {"bytes": size, "too_large": True}
    text = raw.decode("utf-8", errors="replace")
    lowered = content_type.lower()
    if "json" in lowered or looks_like_json(text):
        summary = None
        try:
            summary = {"bytes": size, "json_shape": sanitize_json(json.loads(text))}
        except Exception:
            # Not actually JSON; fall through to the other summaries.
            summary = None
        if summary is not None:
            return summary
    if "text/event-stream" in lowered:
        return {"bytes": size, "sse_events": summarize_sse(text)}
    return {"bytes": size, "preview": trim(text)}
# JSON keys whose *string* values carry harmless metadata (model names,
# roles, finish reasons) and may be logged verbatim; every other string is
# reduced to a "<str:N>" length placeholder by sanitize_json().
KEEP_STRING_KEYS = {
    "model",
    "role",
    "type",
    "name",
    "object",
    "finish_reason",
    "reasoning_effort",
}
def sanitize_json(value, key_context: str | None = None):
    """Reduce a JSON value to a redacted structural sketch.

    Dict values under sensitive-looking keys become "<redacted>"; lists are
    capped at 20 entries; strings become length placeholders unless their
    parent key is in KEEP_STRING_KEYS; scalars pass through unchanged.
    """
    if isinstance(value, dict):
        sketch = {}
        for raw_key, item in value.items():
            folded = str(raw_key).lower()
            sensitive = (
                folded in SENSITIVE_KEYS
                or "token" in folded
                or "secret" in folded
                or "key" in folded
            )
            sketch[raw_key] = "<redacted>" if sensitive else sanitize_json(item, folded)
        return sketch
    if isinstance(value, list):
        return [sanitize_json(entry, key_context) for entry in value[:20]]
    if isinstance(value, str):
        return value if key_context in KEEP_STRING_KEYS else f"<str:{len(value)}>"
    if value is None or isinstance(value, (bool, int, float)):
        return value
    return f"<{type(value).__name__}>"
def summarize_sse(text: str):
    """Condense an SSE payload into at most 20 {event, data} summaries.

    Only the first 200 lines are scanned; an entry is emitted when its
    ``data:`` line arrives (event-only fragments are carried forward).
    """
    summaries = []
    pending = {}
    for raw_line in text.splitlines()[:200]:
        if raw_line.startswith("event:"):
            pending["event"] = raw_line[6:].strip()
        elif raw_line.startswith("data:"):
            pending["data"] = summarize_data_line(raw_line[5:].strip())
            summaries.append(pending)
            pending = {}
    return summaries[:20]
def summarize_data_line(text: str):
    """Summarize one SSE ``data:`` payload; the [DONE] sentinel passes through."""
    if text == "[DONE]":
        return text
    try:
        summarized = sanitize_json(json.loads(text))
    except Exception:
        summarized = trim(text)
    return summarized
def looks_like_json(text: str) -> bool:
    """Heuristic: does the trimmed text start like a JSON object or array?"""
    return text.strip().startswith(("{", "["))
def trim(text: str, limit: int = 240) -> str:
    """Escape CR/LF and cap the length, marking any truncation."""
    flattened = text.replace("\r", "\\r").replace("\n", "\\n")
    if len(flattened) <= limit:
        return flattened
    return flattened[:limit] + "...<truncated>"
def append(event: dict) -> None:
    """Append one event as a JSON line to the log file at OUT."""
    line = json.dumps(event, ensure_ascii=False)
    with open(OUT, "a", encoding="utf-8") as handle:
        handle.write(line + "\n")

125
src/codebuddy.ts Normal file
View File

@@ -0,0 +1,125 @@
import { unstable_v2_createSession } from '@tencent-ai/agent-sdk';
import type { Message, PermissionMode, Session } from '@tencent-ai/agent-sdk';
import type { AccountConfig, AppConfig } from './config';
export type TextHandler = (text: string) => void;

/** Round-robin pool of per-account CodeBuddy session workers. */
export class CodeBuddyPool {
  private readonly workers: AccountWorker[];
  private cursor = 0;

  constructor(config: AppConfig) {
    this.workers = config.accounts.map((account) => new AccountWorker(config, account));
  }

  /** Dispatch one prompt to the next worker in rotation. */
  async run(prompt: string, requestedModel: string | undefined, onText?: TextHandler): Promise<string> {
    const index = this.cursor % this.workers.length;
    this.cursor += 1;
    return this.workers[index].run(prompt, requestedModel, onText);
  }

  /** Close every worker's underlying session. */
  async close(): Promise<void> {
    for (const worker of this.workers) worker.close();
  }
}
/**
 * Serializes requests onto one cached CodeBuddy session for a single
 * account. The session is created lazily and cached only after connect()
 * succeeds, so a failed connection attempt is retried on the next request
 * instead of poisoning the cache.
 */
class AccountWorker {
  private session?: Session;
  private queue: Promise<void> = Promise.resolve();

  constructor(
    private readonly app: AppConfig,
    private readonly account: AccountConfig,
  ) {}

  /** Send one prompt and collect the streamed / final text. */
  async run(prompt: string, requestedModel: string | undefined, onText?: TextHandler): Promise<string> {
    return this.withLock(async () => {
      const session = await this.getSession(requestedModel);
      await session.send(prompt);
      let resultText = '';
      let assistantText = '';
      let streamedAny = false;
      for await (const message of session.stream()) {
        if (message.type === 'stream_event') {
          const delta = message.event.type === 'content_block_delta' && message.event.delta.type === 'text_delta'
            ? message.event.delta.text
            : '';
          if (delta) {
            streamedAny = true;
            resultText += delta;
            onText?.(delta);
          }
          continue;
        }
        if (message.type === 'assistant') {
          // Extract once and reuse (previously extracted twice per message).
          const text = extractAssistantText(message);
          assistantText += text;
          // Forward whole assistant messages only when no deltas streamed,
          // otherwise the client would receive the text twice.
          if (onText && !streamedAny && text) onText(text);
          continue;
        }
        if (message.type === 'result') {
          if (message.subtype === 'success') {
            return resultText || message.result || assistantText;
          }
          throw new Error(message.errors?.join('; ') || message.subtype);
        }
      }
      return resultText || assistantText;
    });
  }

  /** Close and drop the cached session; a new one is created on next run. */
  close(): void {
    this.session?.close();
    this.session = undefined;
  }

  /**
   * Lazily create and connect a session with this account's credentials.
   * Fix: cache the session only after connect() resolves — the previous
   * version assigned `this.session` before connecting, so a failed connect
   * left a broken session cached and reused by every later request.
   */
  private async getSession(requestedModel: string | undefined): Promise<Session> {
    if (!this.session) {
      const session = unstable_v2_createSession({
        cwd: this.app.cwd,
        model: requestedModel || this.app.model,
        permissionMode: this.app.permissionMode as PermissionMode,
        includePartialMessages: true,
        settingSources: [],
        env: {
          CODEBUDDY_API_KEY: this.account.apiKey,
          CODEBUDDY_AUTH_TOKEN: this.account.authToken,
          CODEBUDDY_INTERNET_ENVIRONMENT: this.account.internetEnvironment,
          CODEBUDDY_CONFIG_DIR: this.account.configDir,
        },
      });
      await session.connect();
      this.session = session;
    } else if (requestedModel && requestedModel !== this.session.getModel()) {
      await this.session.setModel(requestedModel);
    }
    return this.session;
  }

  /** Chain tasks so at most one request uses the session at a time. */
  private async withLock<T>(task: () => Promise<T>): Promise<T> {
    const previous = this.queue;
    let release!: () => void;
    this.queue = new Promise<void>((resolve) => {
      release = resolve;
    });
    await previous;
    try {
      return await task();
    } finally {
      release();
    }
  }
}
/** Concatenate the text blocks of a full assistant message. */
function extractAssistantText(message: Extract<Message, { type: 'assistant' }>): string {
  const pieces: string[] = [];
  for (const block of message.message.content) {
    if (block.type === 'text' && block.text) pieces.push(block.text);
  }
  return pieces.join('');
}

124
src/config.ts Normal file
View File

@@ -0,0 +1,124 @@
import { existsSync, mkdirSync, readFileSync } from 'node:fs';
import { join, resolve } from 'node:path';
// One resolved CodeBuddy credential set; each account gets its own config dir.
export type AccountConfig = {
  id: string;
  // Credentials passed to the SDK session via env vars; either may be unset.
  apiKey?: string;
  authToken?: string;
  internetEnvironment?: string;
  // Directory created by normalizeAccount(); passed as CODEBUDDY_CONFIG_DIR.
  configDir: string;
};
// Fully-resolved application configuration returned by loadConfig().
export type AppConfig = {
  port: number;
  // When set, every HTTP request must carry `Authorization: Bearer <key>`.
  proxyApiKey?: string;
  // Default model when the client request does not name one.
  model?: string;
  // When true, the client's requested model is forwarded to the SDK.
  passRequestModel: boolean;
  permissionMode: 'default' | 'acceptEdits' | 'bypassPermissions' | 'plan' | 'delegate' | 'dontAsk';
  cwd: string;
  accounts: AccountConfig[];
};
// Loosely-typed account entry as read from JSON / env / the apikey file.
type AccountInput = {
  id?: string;
  apiKey?: string;
  authToken?: string;
  internetEnvironment?: string;
  configDir?: string;
};
/** Build the application config from environment variables and credential sources. */
export function loadConfig(): AppConfig {
  const cwd = process.cwd();
  const accounts = loadAccounts(cwd);
  if (!accounts.length) {
    throw new Error('No CodeBuddy credential found. Provide CODEBUDDY_ACCOUNTS_JSON, CODEBUDDY_API_KEY, CODEBUDDY_AUTH_TOKEN, or an apikey file.');
  }
  const env = process.env;
  return {
    port: Number(env.PORT ?? 8787),
    proxyApiKey: env.PROXY_API_KEY,
    model: emptyToUndefined(env.CODEBUDDY_MODEL),
    passRequestModel: env.CODEBUDDY_PASS_REQUEST_MODEL === '1',
    permissionMode: (env.CODEBUDDY_PERMISSION_MODE as AppConfig['permissionMode'] | undefined) ?? 'bypassPermissions',
    cwd,
    accounts,
  };
}
function loadAccounts(cwd: string): AccountConfig[] {
const fromJson = parseAccountsJson(process.env.CODEBUDDY_ACCOUNTS_JSON, cwd);
if (fromJson.length > 0) return fromJson;
if (process.env.CODEBUDDY_API_KEY || process.env.CODEBUDDY_AUTH_TOKEN) {
return [normalizeAccount({
id: 'env-1',
apiKey: process.env.CODEBUDDY_API_KEY,
authToken: process.env.CODEBUDDY_AUTH_TOKEN,
internetEnvironment: process.env.CODEBUDDY_INTERNET_ENVIRONMENT,
}, cwd, 0)];
}
const file = resolve(cwd, process.env.CODEBUDDY_APIKEY_FILE ?? 'apikey');
if (!existsSync(file)) return [];
const raw = readFileSync(file, 'utf8').trim();
if (!raw) return [];
const parsed = parseAccountsJson(raw, cwd);
if (parsed.length > 0) return parsed;
const tokenKind = process.env.CODEBUDDY_TOKEN_KIND === 'auth_token' ? 'auth_token' : 'api_key';
return raw
.split(/\r?\n/)
.map((line) => line.trim())
.filter((line) => line && !line.startsWith('#'))
.map((line, index) => parseTokenLine(line, tokenKind, index, cwd));
}
/** Parse a JSON accounts payload; returns [] on any parse or shape problem. */
function parseAccountsJson(raw: string | undefined, cwd: string): AccountConfig[] {
  if (!raw) return [];
  try {
    const value = JSON.parse(raw) as unknown;
    let items: unknown[] = [];
    if (Array.isArray(value)) {
      items = value;
    } else if (typeof value === 'object' && value !== null) {
      const nested = (value as { accounts?: unknown }).accounts;
      if (Array.isArray(nested)) items = nested;
    }
    return items.map((item, index) => normalizeAccount(item as AccountInput, cwd, index));
  } catch {
    return [];
  }
}
function parseTokenLine(line: string, tokenKind: 'api_key' | 'auth_token', index: number, cwd: string): AccountConfig {
const eq = line.indexOf('=');
if (eq > 0) {
const key = line.slice(0, eq).trim();
const value = line.slice(eq + 1).trim();
if (key === 'CODEBUDDY_AUTH_TOKEN') return normalizeAccount({ id: `file-${index + 1}`, authToken: value }, cwd, index);
if (key === 'CODEBUDDY_API_KEY') return normalizeAccount({ id: `file-${index + 1}`, apiKey: value }, cwd, index);
}
return normalizeAccount({
id: `file-${index + 1}`,
apiKey: tokenKind === 'api_key' ? line : undefined,
authToken: tokenKind === 'auth_token' ? line : undefined,
}, cwd, index);
}
/** Fill defaults and ensure the account's config directory exists on disk. */
function normalizeAccount(input: AccountInput, cwd: string, index: number): AccountConfig {
  const id = input.id ?? `account-${index + 1}`;
  const configDir = input.configDir ?? join(cwd, '.codebuddy-accounts', id);
  mkdirSync(configDir, { recursive: true });
  const account: AccountConfig = {
    id,
    apiKey: emptyToUndefined(input.apiKey),
    authToken: emptyToUndefined(input.authToken),
    internetEnvironment: emptyToUndefined(input.internetEnvironment),
    configDir,
  };
  return account;
}
/** Map missing, empty, or whitespace-only strings to undefined; trim otherwise. */
function emptyToUndefined(value: string | undefined): string | undefined {
  if (value === undefined) return undefined;
  const trimmed = value.trim();
  return trimmed.length > 0 ? trimmed : undefined;
}

39
src/prompt.ts Normal file
View File

@@ -0,0 +1,39 @@
// Minimal chat message shape shared by the OpenAI and Anthropic endpoints.
export type ChatMessage = {
  role: string;
  // string | array of parts | { text: ... }; normalized by contentToText().
  content?: unknown;
};
/** Flatten OpenAI-style chat messages into one role-labelled prompt string. */
export function openAIChatToPrompt(messages: ChatMessage[]): string {
  const sections: string[] = [];
  for (const message of messages) {
    const role = (message.role || 'user').toUpperCase();
    sections.push(`${role}:\n${contentToText(message.content)}`);
  }
  return sections.join('\n\n');
}
/** Build a prompt from an Anthropic request's system prompt plus its messages. */
export function anthropicMessagesToPrompt(system: unknown, messages: ChatMessage[]): string {
  const systemText = contentToText(system);
  const body = openAIChatToPrompt(messages);
  const parts = systemText ? [`SYSTEM:\n${systemText}`, body] : [body];
  return parts.filter(Boolean).join('\n\n');
}
/**
 * Coerce a message `content` value to plain text.
 * Handles plain strings, arrays of parts (strings, { text }, { content }),
 * and bare { text } objects; anything unrecognized becomes ''.
 */
export function contentToText(content: unknown): string {
  if (typeof content === 'string') return content;
  if (Array.isArray(content)) {
    const pieces: string[] = [];
    for (const part of content) {
      if (typeof part === 'string') {
        pieces.push(part);
        continue;
      }
      if (typeof part === 'object' && part !== null) {
        const record = part as Record<string, unknown>;
        if (typeof record.text === 'string') {
          pieces.push(record.text);
        } else if (typeof record.content === 'string') {
          pieces.push(record.content);
        }
      }
    }
    return pieces.filter(Boolean).join('\n');
  }
  if (typeof content === 'object' && content !== null) {
    const record = content as Record<string, unknown>;
    if (typeof record.text === 'string') return record.text;
  }
  return '';
}

104
src/protocols.ts Normal file
View File

@@ -0,0 +1,104 @@
import { randomUUID } from 'node:crypto';
import type { ServerResponse } from 'node:http';
import { anthropicMessagesToPrompt, openAIChatToPrompt, type ChatMessage } from './prompt';
// Subset of an OpenAI /chat/completions request body that this proxy reads.
export type OpenAIChatRequest = {
  model?: string;
  messages?: ChatMessage[];
  stream?: boolean;
};
// Subset of an Anthropic /v1/messages request body that this proxy reads.
export type AnthropicMessagesRequest = {
  model?: string;
  system?: unknown;
  messages?: ChatMessage[];
  stream?: boolean;
};
/** Prompt text for an OpenAI-style request; tolerates a missing message list. */
export function openAIPrompt(req: OpenAIChatRequest): string {
  const messages = req.messages ?? [];
  return openAIChatToPrompt(messages);
}
/** Prompt text for an Anthropic-style request, including its system prompt. */
export function anthropicPrompt(req: AnthropicMessagesRequest): string {
  const messages = req.messages ?? [];
  return anthropicMessagesToPrompt(req.system, messages);
}
/** Send a complete (non-streaming) OpenAI chat.completion response. */
export function writeOpenAIResponse(res: ServerResponse, model: string, text: string): void {
  const payload = {
    id: `chatcmpl-${randomUUID()}`,
    object: 'chat.completion',
    created: Math.floor(Date.now() / 1000),
    model,
    choices: [{ index: 0, message: { role: 'assistant', content: text }, finish_reason: 'stop' }],
    // Token accounting is not tracked here; zeros keep clients happy.
    usage: { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 },
  };
  writeJson(res, 200, payload);
}
/** Begin an OpenAI SSE stream; the returned writer emits one chunk per call. */
export function startOpenAIStream(res: ServerResponse, model: string): (text: string) => void {
  const id = `chatcmpl-${randomUUID()}`;
  sseHeaders(res);
  return (text: string) => {
    const chunk = {
      id,
      object: 'chat.completion.chunk',
      created: Math.floor(Date.now() / 1000),
      model,
      choices: [{ index: 0, delta: { content: text }, finish_reason: null }],
    };
    res.write(`data: ${JSON.stringify(chunk)}\n\n`);
  };
}
/** Terminate an OpenAI SSE stream with the [DONE] sentinel and close it. */
export function endOpenAIStream(res: ServerResponse): void {
  res.write('data: [DONE]\n\n');
  res.end();
}
/** Send a complete (non-streaming) Anthropic message response. */
export function writeAnthropicResponse(res: ServerResponse, model: string, text: string): void {
  const id = `msg_${randomUUID().replaceAll('-', '')}`;
  writeJson(res, 200, {
    id,
    type: 'message',
    role: 'assistant',
    model,
    content: [{ type: 'text', text }],
    stop_reason: 'end_turn',
    stop_sequence: null,
    // Token accounting is not tracked here; report zeros.
    usage: { input_tokens: 0, output_tokens: 0 },
  });
}
/**
 * Begin an Anthropic SSE stream (message_start + content_block_start);
 * the returned writer emits one content_block_delta per call.
 */
export function startAnthropicStream(res: ServerResponse, model: string): (text: string) => void {
  const id = `msg_${randomUUID().replaceAll('-', '')}`;
  sseHeaders(res);
  const start = {
    type: 'message_start',
    message: { id, type: 'message', role: 'assistant', model, content: [], stop_reason: null, stop_sequence: null, usage: { input_tokens: 0, output_tokens: 0 } },
  };
  res.write(`event: message_start\ndata: ${JSON.stringify(start)}\n\n`);
  const blockStart = { type: 'content_block_start', index: 0, content_block: { type: 'text', text: '' } };
  res.write(`event: content_block_start\ndata: ${JSON.stringify(blockStart)}\n\n`);
  return (text: string) => {
    const delta = { type: 'content_block_delta', index: 0, delta: { type: 'text_delta', text } };
    res.write(`event: content_block_delta\ndata: ${JSON.stringify(delta)}\n\n`);
  };
}
/** Close an Anthropic SSE stream: block stop, final delta, message stop. */
export function endAnthropicStream(res: ServerResponse): void {
  const blockStop = { type: 'content_block_stop', index: 0 };
  const finalDelta = { type: 'message_delta', delta: { stop_reason: 'end_turn', stop_sequence: null }, usage: { output_tokens: 0 } };
  res.write(`event: content_block_stop\ndata: ${JSON.stringify(blockStop)}\n\n`);
  res.write(`event: message_delta\ndata: ${JSON.stringify(finalDelta)}\n\n`);
  res.write(`event: message_stop\ndata: ${JSON.stringify({ type: 'message_stop' })}\n\n`);
  res.end();
}
/** Serialize a value and send it as a complete JSON response. */
export function writeJson(res: ServerResponse, statusCode: number, value: unknown): void {
  const body = JSON.stringify(value);
  res.writeHead(statusCode, { 'content-type': 'application/json; charset=utf-8' });
  res.end(body);
}
/** Send an Anthropic-style JSON error envelope. */
export function writeError(res: ServerResponse, statusCode: number, message: string): void {
  const payload = { error: { type: 'api_error', message } };
  writeJson(res, statusCode, payload);
}
/** Flush the standard SSE response headers. */
function sseHeaders(res: ServerResponse): void {
  const headers = {
    'content-type': 'text/event-stream; charset=utf-8',
    'cache-control': 'no-cache, no-transform',
    connection: 'keep-alive',
  };
  res.writeHead(200, headers);
}

113
src/server.ts Normal file
View File

@@ -0,0 +1,113 @@
import { timingSafeEqual } from 'node:crypto';
import { createServer, type IncomingMessage, type ServerResponse } from 'node:http';
import { loadConfig } from './config';
import { CodeBuddyPool } from './codebuddy';
import {
  anthropicPrompt,
  endAnthropicStream,
  endOpenAIStream,
  openAIPrompt,
  startAnthropicStream,
  startOpenAIStream,
  writeAnthropicResponse,
  writeError,
  writeJson,
  writeOpenAIResponse,
  type AnthropicMessagesRequest,
  type OpenAIChatRequest,
} from './protocols';
// Bootstrap: resolve configuration first (loadConfig throws fast when no
// credentials are available), build the per-account pool, then start HTTP.
const config = loadConfig();
const pool = new CodeBuddyPool(config);
// Single request router. Errors bubble to the catch below; a JSON error is
// written only when headers have not been flushed (i.e. not mid-SSE-stream).
const server = createServer(async (req, res) => {
  try {
    // The proxy key (when configured) gates every route, including /health.
    if (!authorize(req)) {
      writeError(res, 401, 'Unauthorized');
      return;
    }
    const url = new URL(req.url ?? '/', `http://${req.headers.host ?? '127.0.0.1'}`);
    if (req.method === 'GET' && url.pathname === '/health') {
      writeJson(res, 200, { ok: true, accounts: config.accounts.length });
      return;
    }
    if (req.method === 'GET' && url.pathname === '/debug/memory') {
      writeJson(res, 200, { pid: process.pid, memory: process.memoryUsage() });
      return;
    }
    if (req.method === 'GET' && url.pathname === '/v1/models') {
      writeJson(res, 200, { object: 'list', data: [{ id: config.model ?? 'codebuddy', object: 'model', owned_by: 'codebuddy' }] });
      return;
    }
    // OpenAI-compatible endpoint, with and without the /v1 prefix.
    if (req.method === 'POST' && (url.pathname === '/v1/chat/completions' || url.pathname === '/chat/completions')) {
      await handleOpenAI(req, res);
      return;
    }
    // Anthropic-compatible endpoint.
    if (req.method === 'POST' && url.pathname === '/v1/messages') {
      await handleAnthropic(req, res);
      return;
    }
    writeError(res, 404, 'Not found');
  } catch (error) {
    if (!res.headersSent) writeError(res, 500, error instanceof Error ? error.message : String(error));
    else res.end();
  }
});
// Listens on loopback (127.0.0.1) only.
server.listen(config.port, '127.0.0.1', () => {
  console.log(`codebuddy2api listening on http://127.0.0.1:${config.port}`);
  console.log(`accounts loaded: ${config.accounts.length}`);
});
process.on('SIGINT', shutdown);
process.on('SIGTERM', shutdown);
/** POST /v1/chat/completions — OpenAI-compatible, streaming or buffered. */
async function handleOpenAI(req: IncomingMessage, res: ServerResponse): Promise<void> {
  const body = await readJson<OpenAIChatRequest>(req);
  const model = body.model || config.model || 'codebuddy';
  const prompt = openAIPrompt(body);
  const upstreamModel = sdkModel(body.model);
  if (!body.stream) {
    const text = await pool.run(prompt, upstreamModel);
    writeOpenAIResponse(res, model, text);
    return;
  }
  const write = startOpenAIStream(res, model);
  await pool.run(prompt, upstreamModel, write);
  endOpenAIStream(res);
}
/** POST /v1/messages — Anthropic-compatible, streaming or buffered. */
async function handleAnthropic(req: IncomingMessage, res: ServerResponse): Promise<void> {
  const body = await readJson<AnthropicMessagesRequest>(req);
  const model = body.model || config.model || 'codebuddy';
  const prompt = anthropicPrompt(body);
  const upstreamModel = sdkModel(body.model);
  if (!body.stream) {
    const text = await pool.run(prompt, upstreamModel);
    writeAnthropicResponse(res, model, text);
    return;
  }
  const write = startAnthropicStream(res, model);
  await pool.run(prompt, upstreamModel, write);
  endAnthropicStream(res);
}
/** Model to forward upstream: the client's only when passRequestModel is on. */
function sdkModel(requestModel: string | undefined): string | undefined {
  if (!config.passRequestModel) return undefined;
  return requestModel;
}
/**
 * Check the Bearer token when PROXY_API_KEY is configured.
 * Fix: use a constant-time comparison — plain `===` short-circuits on the
 * first differing character, which can leak the key via response timing.
 */
function authorize(req: IncomingMessage): boolean {
  if (!config.proxyApiKey) return true;
  const header = req.headers.authorization ?? '';
  const expected = Buffer.from(`Bearer ${config.proxyApiKey}`);
  const provided = Buffer.from(header);
  // The length check leaks only the length, never the contents.
  return provided.length === expected.length && timingSafeEqual(provided, expected);
}
/** Collect the request body and parse it as JSON ({} for an empty body). */
async function readJson<T>(req: IncomingMessage): Promise<T> {
  const chunks: Buffer[] = [];
  for await (const chunk of req) {
    chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
  }
  if (!chunks.length) return {} as T;
  const raw = Buffer.concat(chunks).toString('utf8');
  return JSON.parse(raw) as T;
}
/** Close worker sessions, then the HTTP listener, then exit the process. */
function shutdown(): void {
  pool.close().finally(() => server.close(() => process.exit(0)));
}

13
tsconfig.json Normal file
View File

@@ -0,0 +1,13 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "CommonJS",
"moduleResolution": "Node",
"outDir": "dist",
"rootDir": "src",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true
},
"include": ["src/**/*.ts"]
}

60
抓包.md Normal file
View File

@@ -0,0 +1,60 @@
抓到了。CodeBuddy SDK/CLI 会走代理,HTTPS 明文能被 `mitmdump` 解出来。
我新增了一个打码抓包脚本:
[scripts/mitm-redact.py](/home/wolves/project/codebuddy2api/scripts/mitm-redact.py)
注意路径是 `scripts/mitm-redact.py`。它只记录摘要:Authorization、API key、token、cookie 都会打码。
本次关键结果:
```text
POST https://copilot.tencent.com/v2/chat/completions
POST https://copilot.tencent.com/v2/report
```
主请求是 OpenAI Chat Completions 风格:
```json
{
"model": "glm-5.1",
"messages": [
{ "role": "system", "content": "..." },
{ "role": "user", "content": [{ "type": "text" }, { "type": "text" }] }
],
"tools": ["Agent", "Read", "Write", "Edit", "MultiEdit", "Bash", "Glob", "Grep", "EnterPlanMode", "ExitPlanMode", "TaskCreate", "TaskGet", "TaskUpdate", "TaskList", "WebFetch", "WebSearch", "TaskStop", "TaskOutput", "Skill", "AskUserQuestion"],
"temperature": 1,
"stream": true,
"stream_options": { "include_usage": true },
"reasoning_effort": "medium"
}
```
请求头关键项:
```text
X-API-Key: <redacted>
Authorization: <redacted>
X-Conversation-ID
X-Conversation-Request-ID
X-Conversation-Message-ID
X-Agent-Intent: craft
X-IDE-Type: CLI
X-IDE-Name: CLI
X-IDE-Version: 2.93.3
x-codebuddy-request: 1
User-Agent: CLI/2.93.3 CodeBuddy/2.93.3 ...
Content-Encoding: gzip
```
响应也是 OpenAI SSE chunk
```text
content-type: text/event-stream
object: chat.completion.chunk
model: glm-5.1-tencent
data: {...}
data: [DONE]
```
结论:低内存直连方向是可行的,至少主链路不是私有二进制协议,而是 `copilot.tencent.com/v2/chat/completions` 的 OpenAI-compatible SSE。下一步要做的是把当前 TS 中转从 SDK 改成直接调用这个上游:复刻必要 headers、gzip 请求、SSE 转发,并验证只用 `apikey` 能不能稳定鉴权。