pub fn convert_gemini_to_openai(
    response: GeminiResponse,
    model: &str,
) -> Result<OpenAIResponse, AppError> {
    // Gemini may return several candidates; the OpenAI shape below exposes one choice.
    let candidate = response
        .candidates
        .first()
        .ok_or(AppError::InvalidResponse("No candidates"))?;
    // Concatenate every text part of the candidate into one assistant message body.
    let content = candidate
        .content
        .parts
        .iter()
        .filter_map(|part| part.text.clone())
        .collect::<Vec<_>>()
        .join("");
    // Convert Gemini function calls into OpenAI-style tool calls. Gemini does
    // not supply call IDs, so synthesize stable ones from the part index.
    let tool_calls = candidate
        .content
        .parts
        .iter()
        .filter_map(|part| part.function_call.as_ref())
        .enumerate()
        .map(|(i, fc)| OpenAIToolCall {
            id: format!("call_{}", i),
            type_: "function".to_string(),
            function: OpenAIFunction {
                name: fc.name.clone(),
                // Serialize the argument object; fall back to an empty JSON
                // object rather than panicking if serialization fails.
                arguments: serde_json::to_string(&fc.args)
                    .unwrap_or_else(|_| "{}".to_string()),
            },
        })
        .collect::<Vec<_>>();
    // Pass token usage straight through from Gemini's usage metadata.
    let usage = OpenAIUsage {
        prompt_tokens: response.usage_metadata.prompt_token_count,
        completion_tokens: response.usage_metadata.candidates_token_count,
        total_tokens: response.usage_metadata.total_token_count,
    };
    Ok(OpenAIResponse {
        id: format!("chatcmpl-{}", uuid::Uuid::new_v4()),
        object: "chat.completion".to_string(),
        created: chrono::Utc::now().timestamp(),
        model: model.to_string(),
        choices: vec![OpenAIChoice {
            index: 0,
            message: OpenAIMessage {
                role: "assistant".to_string(),
                // OpenAI clients expect null here rather than an empty string or list.
                content: if content.is_empty() { None } else { Some(content) },
                tool_calls: if tool_calls.is_empty() { None } else { Some(tool_calls) },
            },
            finish_reason: map_finish_reason(&candidate.finish_reason),
        }],
        usage: Some(usage),
    })
}
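
// Illustrative sketch of the `map_finish_reason` helper called above (it may
// already be defined elsewhere in this crate). It assumes Gemini's finish reason
// arrives as an Option<String> and that the OpenAI choice takes an optional
// lowercase reason string; the real signatures may differ.
fn map_finish_reason(reason: &Option<String>) -> Option<String> {
    match reason.as_deref() {
        // Normal completion and token-limit truncation map directly.
        Some("STOP") => Some("stop".to_string()),
        Some("MAX_TOKENS") => Some("length".to_string()),
        // Safety blocks and recitation are closest to OpenAI's content filter.
        Some("SAFETY") | Some("RECITATION") => Some("content_filter".to_string()),
        // A fuller mapping might also report "tool_calls" when function calls
        // are present; default unknown reasons to "stop".
        Some(_) => Some("stop".to_string()),
        None => None,
    }
}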