import { randomUUID } from 'node:crypto';
import type { ServerResponse } from 'node:http';

import { anthropicMessagesToPrompt, openAIChatToPrompt, type ChatMessage } from './prompt';
/**
 * Minimal shape of an OpenAI `/v1/chat/completions` request body —
 * only the fields this module actually reads.
 */
export type OpenAIChatRequest = {
  // Model identifier; optional because callers may rely on a server default.
  model?: string;
  // Conversation turns; treated as empty when absent (see openAIPrompt).
  messages?: ChatMessage[];
  // When true the caller expects an SSE stream rather than a single JSON body.
  stream?: boolean;
};
/**
 * Minimal shape of an Anthropic `/v1/messages` request body —
 * only the fields this module actually reads.
 */
export type AnthropicMessagesRequest = {
  // Model identifier; optional because callers may rely on a server default.
  model?: string;
  // System prompt; typed `unknown` and passed straight to
  // anthropicMessagesToPrompt — presumably a string or content-block array
  // per the Anthropic API; verify against that helper's handling.
  system?: unknown;
  // Conversation turns; treated as empty when absent (see anthropicPrompt).
  messages?: ChatMessage[];
  // When true the caller expects an SSE stream rather than a single JSON body.
  stream?: boolean;
};
export function openAIPrompt(req: OpenAIChatRequest): string {
|
|
return openAIChatToPrompt(req.messages ?? []);
|
|
}
|
|
|
|
export function anthropicPrompt(req: AnthropicMessagesRequest): string {
|
|
return anthropicMessagesToPrompt(req.system, req.messages ?? []);
|
|
}
|
|
|
|
export function writeOpenAIResponse(res: ServerResponse, model: string, text: string): void {
|
|
writeJson(res, 200, {
|
|
id: `chatcmpl-${randomUUID()}`,
|
|
object: 'chat.completion',
|
|
created: Math.floor(Date.now() / 1000),
|
|
model,
|
|
choices: [{ index: 0, message: { role: 'assistant', content: text }, finish_reason: 'stop' }],
|
|
usage: { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 },
|
|
});
|
|
}
|
|
|
|
export function startOpenAIStream(res: ServerResponse, model: string): (text: string) => void {
|
|
const id = `chatcmpl-${randomUUID()}`;
|
|
sseHeaders(res);
|
|
return (text: string) => {
|
|
res.write(`data: ${JSON.stringify({
|
|
id,
|
|
object: 'chat.completion.chunk',
|
|
created: Math.floor(Date.now() / 1000),
|
|
model,
|
|
choices: [{ index: 0, delta: { content: text }, finish_reason: null }],
|
|
})}\n\n`);
|
|
};
|
|
}
|
|
|
|
export function endOpenAIStream(res: ServerResponse): void {
|
|
res.write('data: [DONE]\n\n');
|
|
res.end();
|
|
}
|
|
|
|
export function writeAnthropicResponse(res: ServerResponse, model: string, text: string): void {
|
|
writeJson(res, 200, {
|
|
id: `msg_${randomUUID().replaceAll('-', '')}`,
|
|
type: 'message',
|
|
role: 'assistant',
|
|
model,
|
|
content: [{ type: 'text', text }],
|
|
stop_reason: 'end_turn',
|
|
stop_sequence: null,
|
|
usage: { input_tokens: 0, output_tokens: 0 },
|
|
});
|
|
}
|
|
|
|
export function startAnthropicStream(res: ServerResponse, model: string): (text: string) => void {
|
|
const id = `msg_${randomUUID().replaceAll('-', '')}`;
|
|
sseHeaders(res);
|
|
res.write(`event: message_start\ndata: ${JSON.stringify({
|
|
type: 'message_start',
|
|
message: { id, type: 'message', role: 'assistant', model, content: [], stop_reason: null, stop_sequence: null, usage: { input_tokens: 0, output_tokens: 0 } },
|
|
})}\n\n`);
|
|
res.write(`event: content_block_start\ndata: ${JSON.stringify({ type: 'content_block_start', index: 0, content_block: { type: 'text', text: '' } })}\n\n`);
|
|
return (text: string) => {
|
|
res.write(`event: content_block_delta\ndata: ${JSON.stringify({ type: 'content_block_delta', index: 0, delta: { type: 'text_delta', text } })}\n\n`);
|
|
};
|
|
}
|
|
|
|
export function endAnthropicStream(res: ServerResponse): void {
|
|
res.write(`event: content_block_stop\ndata: ${JSON.stringify({ type: 'content_block_stop', index: 0 })}\n\n`);
|
|
res.write(`event: message_delta\ndata: ${JSON.stringify({ type: 'message_delta', delta: { stop_reason: 'end_turn', stop_sequence: null }, usage: { output_tokens: 0 } })}\n\n`);
|
|
res.write(`event: message_stop\ndata: ${JSON.stringify({ type: 'message_stop' })}\n\n`);
|
|
res.end();
|
|
}
|
|
|
|
export function writeJson(res: ServerResponse, statusCode: number, value: unknown): void {
|
|
res.writeHead(statusCode, { 'content-type': 'application/json; charset=utf-8' });
|
|
res.end(JSON.stringify(value));
|
|
}
|
|
|
|
export function writeError(res: ServerResponse, statusCode: number, message: string): void {
|
|
writeJson(res, statusCode, { error: { type: 'api_error', message } });
|
|
}
|
|
|
|
function sseHeaders(res: ServerResponse): void {
|
|
res.writeHead(200, {
|
|
'content-type': 'text/event-stream; charset=utf-8',
|
|
'cache-control': 'no-cache, no-transform',
|
|
connection: 'keep-alive',
|
|
});
|
|
}
|