Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: deepseek r1 alternative tool calling format #975

Draft
wants to merge 3 commits into
base: main
Choose a base branch
from
Draft
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
117 changes: 116 additions & 1 deletion crates/goose/src/providers/openrouter.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ use mcp_core::tool::Tool;

pub const OPENROUTER_DEFAULT_MODEL: &str = "anthropic/claude-3.5-sonnet";
pub const OPENROUTER_MODEL_PREFIX_ANTHROPIC: &str = "anthropic";
pub const OPENROUTER_MODEL_PREFIX_DEEPSEEK: &str = "deepseek-r1";

// OpenRouter can run many models, we suggest the default
pub const OPENROUTER_KNOWN_MODELS: &[&str] = &[OPENROUTER_DEFAULT_MODEL];
Expand Down Expand Up @@ -74,6 +75,23 @@ impl OpenRouterProvider {

handle_response_openai_compat(response).await
}

// Extract tools from a system prompt that embeds them in a
// `<functions>…</functions>` block (the format produced for models that
// lack native function calling, e.g. DeepSeek-R1).
//
// Returns `None` when the markers are absent, when the closing tag does
// not follow the opening one, or when the enclosed text is not a valid
// JSON array of `Tool`s; a parse failure is additionally logged at
// `warn` level.
fn extract_tools_from_system(system: &str) -> Option<Vec<Tool>> {
    const OPEN: &str = "<functions>";
    const CLOSE: &str = "</functions>";

    let start = system.find(OPEN)?;
    // Search for the closing tag only *after* the opening one so a stray
    // "</functions>" earlier in the prompt cannot produce a reversed
    // (panicking) slice range.
    let body = &system[start + OPEN.len()..];
    let end = body.find(CLOSE)?;
    // Slice strictly *between* the tags: the markers themselves are not
    // JSON, and including them would make `from_str` fail unconditionally.
    let tools_text = &body[..end];

    match serde_json::from_str::<Vec<Tool>>(tools_text) {
        Ok(tools) => Some(tools),
        Err(e) => {
            tracing::warn!("Failed to parse tools from system message: {}", e);
            None
        }
    }
}
}

/// Update the request when using anthropic model.
Expand Down Expand Up @@ -132,6 +150,79 @@ fn update_request_for_anthropic(original_payload: &Value) -> Value {
payload
}

fn update_request_for_deepseek(original_payload: &Value) -> Value {
let mut payload = original_payload.clone();

// Extract tools before removing them from the payload
let tools = payload.get("tools").cloned();

if let Some(messages_spec) = payload
.as_object_mut()
.and_then(|obj| obj.get_mut("messages"))
.and_then(|messages| messages.as_array_mut())
{
// Add "cache_control" to the last and second-to-last "user" messages

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

cache_control is just for anthropic models

let mut user_count = 0;
for message in messages_spec.iter_mut().rev() {
if message.get("role") == Some(&json!("user")) {
if let Some(content) = message.get_mut("content") {
if let Some(content_str) = content.as_str() {
*content = json!([{
"type": "text",
"text": content_str,
"cache_control": { "type": "ephemeral" }
}]);
}
}
user_count += 1;
if user_count >= 2 {
break;
}
}
}

// Update the system message to include tools and have cache_control field
if let Some(system_message) = messages_spec
.iter_mut()
.find(|msg| msg.get("role") == Some(&json!("system")))
{
if let Some(content) = system_message.get_mut("content") {
if let Some(content_str) = content.as_str() {
let system_content = if let Some(tools) = tools {
// Format tools as a string
let tools_str = serde_json::to_string_pretty(&tools)
.unwrap_or_else(|_| "[]".to_string());
format!(
"{}\n\nYou have access to the following tools:\n<functions>\n{}\n</functions>\n\nTo use a tool, respond with a message containing an XML-like function call block like this:\n<function_calls>\n<invoke name=\"tool_name\">\n<parameter name=\"param_name\">value</parameter>\n</invoke>\n</function_calls>",
content_str,
tools_str
)
} else {
content_str.to_string()
};

*system_message = json!({
"role": "system",
"content": [{
"type": "text",
"text": system_content,
"cache_control": { "type": "ephemeral" }
}]
});
}
}
}
}

// Remove any tools/function calling capabilities from the request
if let Some(obj) = payload.as_object_mut() {
obj.remove("tools");
obj.remove("tool_choice");
}

payload
}

fn create_request_based_on_model(
model_config: &ModelConfig,
system: &str,
Expand All @@ -146,13 +237,22 @@ fn create_request_based_on_model(
&super::utils::ImageFormat::OpenAi,
)?;

// Check for Anthropic models
if model_config
.model_name
.starts_with(OPENROUTER_MODEL_PREFIX_ANTHROPIC)
{
payload = update_request_for_anthropic(&payload);
}

// Check for DeepSeek models
if model_config
.model_name
.contains(OPENROUTER_MODEL_PREFIX_DEEPSEEK)
{
payload = update_request_for_deepseek(&payload);
}

Ok(payload)
}

Expand Down Expand Up @@ -195,14 +295,29 @@ impl Provider for OpenRouterProvider {
messages: &[Message],
tools: &[Tool],
) -> Result<(Message, ProviderUsage), ProviderError> {
// Try to extract tools from system message if available
let effective_tools = if let Some(system_tools) = Self::extract_tools_from_system(system) {
system_tools
} else {
tools.to_vec()
};
Comment on lines +299 to +303

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

why do you need to parse tools from the system prompt if the tools get translated into strings in the system prompt below in create_request_based_on_model ?


// Create the base payload
let payload = create_request_based_on_model(&self.model, system, messages, tools)?;
let payload =
create_request_based_on_model(&self.model, system, messages, &effective_tools)?;

// Make request
let response = self.post(payload.clone()).await?;

// Parse response
let message = response_to_message(response.clone())?;

// Debug log the response structure
println!(
"OpenRouter response: {}",
serde_json::to_string_pretty(&response).unwrap_or_default()
);

let usage = match get_usage(&response) {
Ok(usage) => usage,
Err(ProviderError::UsageError(e)) => {
Expand Down
Loading