/// Provider backed by an OpenAI-compatible chat-completions HTTP API.
pub struct OpenAiProvider {
    // Base URL of the API; requests are sent to "{base_url}/chat/completions".
    base_url: String,
    // Bearer token for the Authorization header; `None` means no credential
    // was supplied (e.g. a local, unauthenticated endpoint).
    credential: Option<String>,
    // Optional cap forwarded as `max_tokens` on every outgoing request.
    // NOTE(review): `new` always initializes this to `None` and nothing in
    // this file sets it — confirm whether a setter is missing.
    max_tokens_override: Option<u32>,
}
impl OpenAiProvider {
    /// Creates a provider that talks to the OpenAI-compatible API rooted at
    /// `base_url`, optionally authenticating with `api_key`.
    ///
    /// No `max_tokens` cap is applied by default; use
    /// [`OpenAiProvider::with_max_tokens`] to set one.
    pub fn new(base_url: impl Into<String>, api_key: Option<String>) -> Self {
        Self {
            base_url: base_url.into(),
            credential: api_key,
            max_tokens_override: None,
        }
    }

    /// Builder-style setter for the per-request `max_tokens` cap.
    ///
    /// Without this, the `max_tokens_override` field could never be set to
    /// `Some(_)` — `new` hard-codes it to `None` and no other constructor
    /// exists — so the override sent in both chat paths was always absent.
    pub fn with_max_tokens(mut self, max_tokens: u32) -> Self {
        self.max_tokens_override = Some(max_tokens);
        self
    }
}
#[async_trait]
impl Provider for OpenAiProvider {
    /// Static capabilities of this backend: native (JSON) tool calling and
    /// image ("vision") inputs are both advertised.
    fn capabilities(&self) -> ProviderCapabilities {
        ProviderCapabilities {
            native_tool_calling: true,
            vision: true,
        }
    }

    /// Converts provider-agnostic tool specs into the OpenAI wire format:
    /// `{"type": "function", "function": {"name", "description", "parameters"}}`.
    fn convert_tools(&self, tools: &[ToolSpec]) -> ToolsPayload {
        let native_tools: Vec<_> = tools
            .iter()
            .map(|spec| {
                serde_json::json!({
                    "type": "function",
                    "function": {
                        "name": spec.name,
                        "description": spec.description,
                        "parameters": spec.parameters,
                    }
                })
            })
            .collect();
        ToolsPayload::OpenAI { tools: native_tools }
    }

    /// Sends a simple (no tools) chat request consisting of an optional
    /// system prompt plus one user message, and returns the first choice's
    /// text content ("" when the response has no choices or no content).
    ///
    /// # Errors
    /// Returns an error on network failure, on a non-2xx HTTP status, or if
    /// the response body cannot be decoded.
    ///
    /// NOTE(review): the `ChatRequest`/`ChatResponse` built and parsed here
    /// have different fields than the `ChatRequest<'_>`/`ChatResponse` used
    /// by `chat` below — confirm these are distinct wire-format types and
    /// not an accidental name collision.
    async fn chat_with_system(
        &self,
        system_prompt: Option<&str>,
        message: &str,
        model: &str,
        temperature: f64,
    ) -> anyhow::Result<String> {
        let mut messages = Vec::with_capacity(2);
        if let Some(sys) = system_prompt {
            messages.push(Message {
                role: "system".into(),
                content: sys.into(),
            });
        }
        messages.push(Message {
            role: "user".into(),
            content: message.into(),
        });

        let request = ChatRequest {
            model: model.to_string(),
            messages,
            temperature,
            max_tokens: self.max_tokens_override,
        };

        let client = Client::new();
        let mut builder = client
            .post(format!("{}/chat/completions", self.base_url))
            .json(&request);
        // Only attach an Authorization header when a credential is actually
        // configured. Previously this always sent `Bearer ` with an empty
        // token, which some OpenAI-compatible servers reject as malformed.
        if let Some(key) = self.credential.as_deref() {
            builder = builder.header("Authorization", format!("Bearer {key}"));
        }

        let response = builder
            .send()
            .await?
            // Surface 401/429/5xx as an HTTP error instead of letting the
            // JSON decode below fail with a misleading parse message.
            .error_for_status()?
            .json::<ChatResponse>()
            .await?;

        let content = response
            .choices
            .first()
            .and_then(|c| c.message.content.as_deref())
            .unwrap_or("");
        Ok(content.to_string())
    }

    /// Full chat entry point with tool support: converts the generic request
    /// into the native OpenAI shape, dispatches it via `send_request`, and
    /// normalizes the first choice (text, tool calls, usage, stop reason)
    /// into the provider-level `ChatResponse`.
    ///
    /// # Errors
    /// Propagates any error from `send_request`.
    async fn chat(
        &self,
        request: ChatRequest<'_>,
        model: &str,
        temperature: f64,
    ) -> anyhow::Result<ChatResponse> {
        // Convert tools (when present) to the OpenAI native format.
        let tools = request.tools.map(|tools| match self.convert_tools(tools) {
            ToolsPayload::OpenAI { tools } => tools,
            // convert_tools on this provider always yields the OpenAI
            // variant; an empty list is a safe fallback should that ever
            // change.
            _ => vec![],
        });

        // Build the native request with tool definitions attached.
        let native_request = NativeChatRequest {
            model: model.to_string(),
            messages: convert_messages(request.messages),
            temperature,
            max_tokens: self.max_tokens_override,
            tools,
            tool_choice: None,
        };

        let response = self.send_request(native_request).await?;

        // Everything we extract lives on the first choice — look it up once
        // instead of re-walking `response.choices` four times.
        let first_choice = response.choices.first();

        let tool_calls = first_choice
            .and_then(|c| c.message.tool_calls.as_ref())
            .map(|calls| {
                calls
                    .iter()
                    .map(|tc| ToolCall {
                        // The wire `id` is optional; normalize a missing id
                        // to an empty string.
                        id: tc.id.clone().unwrap_or_default(),
                        name: tc.function.name.clone(),
                        arguments: tc.function.arguments.clone(),
                    })
                    .collect()
            })
            .unwrap_or_default();

        Ok(ChatResponse {
            text: first_choice.and_then(|c| c.message.content.clone()),
            tool_calls,
            usage: response.usage.map(|u| TokenUsage {
                input_tokens: u.prompt_tokens,
                output_tokens: u.completion_tokens,
            }),
            reasoning_content: None,
            quota_metadata: None,
            stop_reason: first_choice
                .and_then(|c| c.finish_reason.as_deref())
                .map(NormalizedStopReason::from_openai_finish_reason),
            raw_stop_reason: first_choice.and_then(|c| c.finish_reason.clone()),
        })
    }
}