From 492dca389e82694cb23133c1af629896ed9a00be Mon Sep 17 00:00:00 2001
From: Sylvain Cau
Date: Thu, 23 Jan 2025 20:52:06 -0800
Subject: [PATCH] LiteLLM: Add support for api_key and base_url (#1746)

---
 mem0/configs/llms/base.py | 4 ++++
 mem0/llms/litellm.py      | 6 ++++++
 2 files changed, 10 insertions(+)

diff --git a/mem0/configs/llms/base.py b/mem0/configs/llms/base.py
index 78ad13b17b..2bbeb698dc 100644
--- a/mem0/configs/llms/base.py
+++ b/mem0/configs/llms/base.py
@@ -27,6 +27,8 @@ def __init__(
         openai_base_url: Optional[str] = None,
         site_url: Optional[str] = None,
         app_name: Optional[str] = None,
+        # LiteLLM specific
+        litellm_base_url: Optional[str] = None,
         # Ollama specific
         ollama_base_url: Optional[str] = None,
         # AzureOpenAI specific
@@ -63,6 +65,8 @@ def __init__(
         :type site_url: Optional[str], optional
         :param app_name: Openrouter app name to use, defaults to None
         :type app_name: Optional[str], optional
+        :param litellm_base_url: The base URL of the LLM, defaults to None
+        :type litellm_base_url: Optional[str], optional
         :param ollama_base_url: The base URL of the LLM, defaults to None
         :type ollama_base_url: Optional[str], optional
         :param openai_base_url: Openai base URL to be use, defaults to "https://api.openai.com/v1"
diff --git a/mem0/llms/litellm.py b/mem0/llms/litellm.py
index d5896ff80b..8ba6c99422 100644
--- a/mem0/llms/litellm.py
+++ b/mem0/llms/litellm.py
@@ -1,4 +1,5 @@
 import json
+import os
 from typing import Dict, List, Optional

 try:
@@ -69,12 +70,17 @@ def generate_response(
         if not litellm.supports_function_calling(self.config.model):
             raise ValueError(f"Model '{self.config.model}' in litellm does not support function calling.")

+        api_key = self.config.api_key or os.getenv("LITELLM_PROXY_API_KEY")
+        base_url = self.config.litellm_base_url or os.getenv("LITELLM_PROXY_API_BASE")
+
         params = {
             "model": self.config.model,
             "messages": messages,
             "temperature": self.config.temperature,
             "max_tokens": self.config.max_tokens,
             "top_p": self.config.top_p,
+            "api_key": api_key,
+            "base_url": base_url,
         }
         if response_format:
             params["response_format"] = response_format
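
Usage note (not part of the patch): a minimal sketch of how the new options might be passed through a mem0 config dict, assuming the usual Memory.from_config entry point and the "litellm" provider name; the model name and proxy URL below are placeholders, not values from this change. If api_key or litellm_base_url is omitted, the patched code falls back to the LITELLM_PROXY_API_KEY and LITELLM_PROXY_API_BASE environment variables.

import os

from mem0 import Memory

# Assumed config layout; "api_key" and "litellm_base_url" map to the
# BaseLlmConfig fields touched by this patch.
config = {
    "llm": {
        "provider": "litellm",
        "config": {
            "model": "openai/gpt-4o-mini",                 # placeholder model routed via LiteLLM
            "api_key": os.getenv("LITELLM_PROXY_API_KEY"), # explicit key, or rely on the env fallback
            "litellm_base_url": "http://localhost:4000",   # placeholder LiteLLM proxy URL
        },
    }
}

m = Memory.from_config(config)
m.add("I prefer vegetarian food.", user_id="alice")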