Skip to content

Commit

Permalink
Merge pull request #1765 from codestoryai/features/add-support-for-deepseek-r1
Browse files Browse the repository at this point in the history

features/add support for deepseek r1
  • Loading branch information
theskcd authored Jan 20, 2025
2 parents 2d45fdd + 2339287 commit 003e83d
Show file tree
Hide file tree
Showing 4 changed files with 16 additions and 4 deletions.
2 changes: 1 addition & 1 deletion Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

12 changes: 10 additions & 2 deletions llm_client/src/clients/codestory.rs
Original file line number Diff line number Diff line change
Expand Up @@ -168,6 +168,8 @@ impl CodeStoryClient {
LLMType::O1 => Ok("o1".to_owned()), // o1
LLMType::O1Mini => Ok("o1-mini".to_owned()), // o1 mini
LLMType::DeepSeekCoderV3 => Ok("deepseek/deepseek-chat".to_owned()),
LLMType::DeepSeekR1 => Ok("deepseek/deepseek-r1".to_owned()),
LLMType::Custom(custom) => Ok(custom.to_owned()),
_ => Err(LLMClientError::UnSupportedModel),
}
}
Expand Down Expand Up @@ -336,7 +338,9 @@ impl LLMClient for CodeStoryClient {
request: LLMClientCompletionRequest,
) -> Result<String, LLMClientError> {
let (sender, _receiver) = tokio::sync::mpsc::unbounded_channel();
self.stream_completion(api_key, request, sender).await.map(|answer| answer.answer_up_until_now().to_owned())
self.stream_completion(api_key, request, sender)
.await
.map(|answer| answer.answer_up_until_now().to_owned())
}

async fn stream_completion(
Expand Down Expand Up @@ -386,7 +390,11 @@ impl LLMClient for CodeStoryClient {
}
}
}
Ok(LLMClientCompletionResponse::new(buffered_stream, None, model))
Ok(LLMClientCompletionResponse::new(
buffered_stream,
None,
model,
))
}

async fn stream_prompt_completion(
Expand Down
4 changes: 4 additions & 0 deletions llm_client/src/clients/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,8 @@ pub enum LLMType {
DeepSeekCoderV2,
/// DeepSeek Coder V3 model
DeepSeekCoderV3,
/// DeepSeek r1
DeepSeekR1,
/// CodeLLama 70B Instruct model
CodeLLama70BInstruct,
/// CodeLlama 13B Instruct model
Expand Down Expand Up @@ -135,6 +137,7 @@ impl<'de> Deserialize<'de> for LLMType {
"gemini-2.0-flash-thinking-exp-1219" => {
Ok(LLMType::Gemini2_0FlashThinkingExperimental)
}
"deepseek/deepseek-r1" => Ok(LLMType::DeepSeekR1),
"Llama3_8bInstruct" => Ok(LLMType::Llama3_8bInstruct),
"Llama3_1_8bInstruct" => Ok(LLMType::Llama3_1_8bInstruct),
"Llama3_1_70bInstruct" => Ok(LLMType::Llama3_1_70bInstruct),
Expand Down Expand Up @@ -231,6 +234,7 @@ impl fmt::Display for LLMType {
LLMType::Gemini2_0FlashThinkingExperimental => {
write!(f, "gemini-2.0-flash-thinking-exp-1219")
}
LLMType::DeepSeekR1 => write!(f, "deepseek/deepseek-r1"),
LLMType::Gpt4O => write!(f, "Gpt4O"),
LLMType::GeminiProFlash => write!(f, "GeminiProFlash"),
LLMType::DeepSeekCoderV2 => write!(f, "DeepSeekCoderV2"),
Expand Down
2 changes: 1 addition & 1 deletion sidecar/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "sidecar"
version = "0.1.18"
version = "0.1.19"
edition = "2021"
build = "build.rs"

Expand Down

0 comments on commit 003e83d

Please sign in to comment.