diff --git a/app/models.py b/app/models.py
index 265aa73..0674ae5 100644
--- a/app/models.py
+++ b/app/models.py
@@ -277,6 +277,10 @@ async def __chat(self, messages, model, temperature, **kwargs):
         # stream = "tools" not in kwargs
         stream = "stream" in kwargs and kwargs["stream"]
         try:
+            for m in messages:
+                # o1 models do not accept the "system" role, so downgrade it to "user"
+                if model.startswith("o1") and m.get("role") == "system":
+                    m["role"] = "user"
             logger.debug(f"openai chat stream: {stream}")
             res = await self.openai_client.chat.completions.create(
                 model=model,
@@ -306,44 +310,44 @@ def get_models(self):
         default_models = _get_default_model_dict("openai-gpt-4o-mini")
         models = [
             {
-                "id": "openai-o1-mini",
-                "model": "o1-mini",
-                "name": "o1 mini",
+                "id": "openai-gpt-4o-mini",
+                "model": "gpt-4o-mini",
+                "name": "GPT-4o Mini",
                 "provider": "openai",
                 "provider_name": "OpenAI",
                 "provider_brand": "openai",
                 "context": 16,
-                **_get_model_extra_info("o1-mini"),
+                **_get_model_extra_info("gpt-4o-mini"),
             },
             {
-                "id": "openai-o1-preview",
-                "model": "o1-preview",
-                "name": "o1 Preview",
+                "id": "openai-gpt-4o",
+                "model": "gpt-4o",
+                "name": "GPT-4o",
                 "provider": "openai",
                 "provider_name": "OpenAI",
                 "provider_brand": "openai",
-                "context": 16,
-                **_get_model_extra_info("o1-preview"),
+                "context": 8,
+                **_get_model_extra_info("gpt-4o"),
             },
             {
-                "id": "openai-gpt-4o-mini",
-                "model": "gpt-4o-mini",
-                "name": "GPT-4o Mini",
+                "id": "openai-o1-mini",
+                "model": "o1-mini",
+                "name": "o1 mini",
                 "provider": "openai",
                 "provider_name": "OpenAI",
                 "provider_brand": "openai",
                 "context": 16,
-                **_get_model_extra_info("gpt-4o-mini"),
+                **_get_model_extra_info("o1-mini"),
             },
             {
-                "id": "openai-gpt-4o",
-                "model": "gpt-4o",
-                "name": "GPT-4o",
+                "id": "openai-o1-preview",
+                "model": "o1-preview",
+                "name": "o1 Preview",
                 "provider": "openai",
                 "provider_name": "OpenAI",
                 "provider_brand": "openai",
-                "context": 8,
-                **_get_model_extra_info("gpt-4o"),
+                "context": 16,
+                **_get_model_extra_info("o1-preview"),
             },
             {
                 "id": "openai-gpt-4-turbo",