feat: provider abstraction layer + Grok API support

Author: Yeachan-Heo
Date: 2026-04-01 04:10:46 +00:00
parent cbc0a83059
commit 2a0f4b677a
8 changed files with 1547 additions and 999 deletions

View File

@@ -4,9 +4,10 @@ use std::process::Command;
use std::time::{Duration, Instant};
use api::{
read_base_url, AnthropicClient, ContentBlockDelta, InputContentBlock, InputMessage,
MessageRequest, MessageResponse, OutputContentBlock, StreamEvent as ApiStreamEvent, ToolChoice,
ToolDefinition, ToolResultContentBlock,
detect_provider_kind, max_tokens_for_model, resolve_model_alias, ContentBlockDelta,
InputContentBlock, InputMessage, MessageRequest, MessageResponse, OutputContentBlock,
ProviderClient, ProviderKind, StreamEvent as ApiStreamEvent, ToolChoice, ToolDefinition,
ToolResultContentBlock,
};
use reqwest::blocking::Client;
use runtime::{
@@ -1459,14 +1460,14 @@ fn run_agent_job(job: &AgentJob) -> Result<(), String> {
fn build_agent_runtime(
job: &AgentJob,
) -> Result<ConversationRuntime<AnthropicRuntimeClient, SubagentToolExecutor>, String> {
) -> Result<ConversationRuntime<ProviderRuntimeClient, SubagentToolExecutor>, String> {
let model = job
.manifest
.model
.clone()
.unwrap_or_else(|| DEFAULT_AGENT_MODEL.to_string());
let allowed_tools = job.allowed_tools.clone();
let api_client = AnthropicRuntimeClient::new(model, allowed_tools.clone())?;
let api_client = ProviderRuntimeClient::new(model, allowed_tools.clone())?;
let tool_executor = SubagentToolExecutor::new(allowed_tools);
Ok(ConversationRuntime::new(
Session::new(),
@@ -1635,18 +1636,21 @@ fn format_agent_terminal_output(status: &str, result: Option<&str>, error: Optio
sections.join("")
}
// NOTE(review): this span is a unified-diff hunk, not compilable Rust — it
// interleaves the pre-change and post-change lines of a single struct
// definition (the rendering dropped the +/- markers).
// Removed by this commit: the Anthropic-only wrapper.
struct AnthropicRuntimeClient {
// Added by this commit: the provider-agnostic replacement.
struct ProviderRuntimeClient {
    // Dedicated tokio runtime used to drive the async client from blocking code.
    runtime: tokio::runtime::Runtime,
// Removed field type: hard-wired Anthropic client.
    client: AnthropicClient,
// Added field type: abstracted client; the constructor hunk below selects it
// via detect_provider_kind (Anthropic / Xai / OpenAi kinds are visible there).
    client: ProviderClient,
    // Model identifier sent with each MessageRequest (see the stream hunk below).
    model: String,
    // Tool names this agent may use; filters tool_specs_for_allowed_tools.
    allowed_tools: BTreeSet<String>,
}
impl AnthropicRuntimeClient {
impl ProviderRuntimeClient {
fn new(model: String, allowed_tools: BTreeSet<String>) -> Result<Self, String> {
let client = AnthropicClient::from_env()
.map_err(|error| error.to_string())?
.with_base_url(read_base_url());
let model = resolve_model_alias(&model).to_string();
let client = match detect_provider_kind(&model) {
ProviderKind::Anthropic | ProviderKind::Xai | ProviderKind::OpenAi => {
ProviderClient::from_model(&model).map_err(|error| error.to_string())?
}
};
Ok(Self {
runtime: tokio::runtime::Runtime::new().map_err(|error| error.to_string())?,
client,
@@ -1656,7 +1660,7 @@ impl AnthropicRuntimeClient {
}
}
impl ApiClient for AnthropicRuntimeClient {
impl ApiClient for ProviderRuntimeClient {
fn stream(&mut self, request: ApiRequest) -> Result<Vec<AssistantEvent>, RuntimeError> {
let tools = tool_specs_for_allowed_tools(Some(&self.allowed_tools))
.into_iter()
@@ -1668,7 +1672,7 @@ impl ApiClient for AnthropicRuntimeClient {
.collect::<Vec<_>>();
let message_request = MessageRequest {
model: self.model.clone(),
max_tokens: 32_000,
max_tokens: max_tokens_for_model(&self.model),
messages: convert_messages(&request.messages),
system: (!request.system_prompt.is_empty()).then(|| request.system_prompt.join("\n\n")),
tools: (!tools.is_empty()).then_some(tools),