feat(deeplx): cleaned up deeplx. moved most logic to models.py
littleblack111 committed Jul 16, 2024
1 parent 3d646d4 commit efccf65
Showing 2 changed files with 80 additions and 88 deletions.
87 changes: 2 additions & 85 deletions app/main.py
@@ -10,7 +10,7 @@
from fastapi.responses import StreamingResponse

from app.middleware import AuthMiddleware
from app.models import DEFAULT_MODELS, MODELS_AVAILABLE, get_bot
from app.models import DEFAULT_MODELS, MODELS_AVAILABLE, get_bot, get_trans_bot
from app.sync import router as sync_router
from app.utils import (
ProxyRequest,
@@ -54,90 +54,7 @@ async def chat_completions(request: Request):
async def proxy_translations(request: Request):
translation_model = os.environ.get("TRANSLATION_MODEL")
if translation_model == "deeplx":
text = raycast_data["q"]
target_lang = raycast_data["target"]

if "source" in raycast_data:
source_lang = raycast_data["source"]

        # if enabled, this swaps the target language so Chinese text is translated to English and English text to Chinese, regardless of the requested source and target languages.
# from regex import findall
# if len(findall(r'\p{Han}+', text)) >= 1 and target_lang == 'zh':
# raycast_data["target"] = "en"
# target_lang = raycast_data["target"]
# try:
# text.encode(encoding='utf-8').decode('ascii')
# except UnicodeDecodeError:
# pass
# else:
# if target_lang == 'en':
# raycast_data["target"] = "zh"
# target_lang = raycast_data["target"]


deeplx_base_url = os.environ.get("DEEPLX_BASE_URL")
deeplx_api_token = os.environ.get("DEEPLX_API_TOKEN")

if not deeplx_base_url:
return Response(
status_code=500,
content=json.dumps(
{
"error": {
"message": "No DEEPLX_BASE_URL provided",
}
}
),
)
text = text.replace('\n', '\n')
# if not deeplx_api_token:
# deeplx_api_token = ""
# deeplHeader = {"Authorization": f"Bearer {deeplx_api_token}"}
body = {
"text": text,
"target_lang": target_lang,
}
if "source" in raycast_data:
body["source_lang"] = source_lang

try:
req = ProxyRequest(
deeplx_base_url, "POST", '', json.dumps(body), query_params={}
# deeplx_base_url, "POST", headers, json.dumps(body), query_params={}
)
resp = await pass_through_request(http_client, req, nohttps=True, noheaders=True)
print('a')
resp = json.loads(resp.content.decode("utf-8"))
try:
# translated_text = resp["alternatives"][0]
translated_text = resp["data"]
# translated_text = translated_text.replace('\\n', '\n')
# print(translated_text)
res = {"data": {"translations": [{"translatedText": translated_text}]}}
except TypeError:
# res = {"error": {"message": "Failed to translate"}}
# res = {"data": {"translations": [{"translatedText": "Failed to translate"}]}}
logger.warn(f'Text failed to translate: {text}, DEBUG: {translated_text}')
res = {"data": {"translations": [{"translatedText": text}]}}

if "source" not in raycast_data:
res["data"]["translations"][0]["detectedSourceLanguage"] = resp[
"source_lang"
].lower()

return Response(status_code=200, content=json.dumps(res))
except Exception as e:
logger.error(f"DEEPLX error: {e}")
return Response(
status_code=500,
content=json.dumps(
{
"error": {
"message": "Unknown error",
}
}
),
)
return await get_trans_bot(translation_model).translate_completions(raycast_data)
else:
raycast_data = await request.json()
result = []
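After this change, the translations handler in app/main.py no longer builds the DeepLX request itself; it delegates to the translation bot resolved by get_trans_bot. Below is a minimal sketch of that delegation pattern, assuming the route path and that raycast_data is parsed from the request before branching (neither is shown in the hunk above); it is an illustration, not the repository's exact code.

import os

from fastapi import FastAPI, Request

from app.models import get_trans_bot  # import added by this commit

app = FastAPI()


@app.post("/api/v1/translations")  # hypothetical path, for illustration only
async def proxy_translations(request: Request):
    raycast_data = await request.json()
    translation_model = os.environ.get("TRANSLATION_MODEL")
    if translation_model == "deeplx":
        # all DeepLX-specific proxying now lives in translationBot (app/models.py)
        return await get_trans_bot(translation_model).translate_completions(raycast_data)
    # otherwise fall through to the chat-model translation path
    ...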
81 changes: 78 additions & 3 deletions app/models.py
@@ -3,17 +3,19 @@
import logging
import os

import httpx
import anthropic
import google.generativeai as genai
import openai
from google.generativeai import GenerativeModel

from app.utils import json_dumps
from app.utils import json_dumps, pass_through_request, ProxyRequest

logger = logging.getLogger(__name__)

MAX_TOKENS = os.environ.get("MAX_TOKENS", 1024)

http_client = httpx.AsyncClient(verify=False)

class ChatBotAbc(abc.ABC):

@@ -560,34 +562,107 @@ def get_models(self):
]
return {"default_models": default_models, "models": models}

class translationBot(ChatBotAbc):
@classmethod
def is_start_available(cls):
        # available when a DeepLX endpoint is configured; DEEPLX_API_TOKEN is optional
        return bool(os.environ.get("DEEPLX_BASE_URL"))

def translate_completions(self, raycast_data: dict):
return self._transDeeplx(raycast_data["q"], raycast_data.get("source"), raycast_data["target"])

async def _transDeeplx(self, text, source, target):
url = os.environ.get("DEEPLX_BASE_URL")
deeplx_base_url = os.environ.get("DEEPLX_BASE_URL")
deeplx_api_token = os.environ.get("DEEPLX_API_TOKEN")

text = text.replace('\n', '\n')
# if not deeplx_api_token:
# deeplx_api_token = ""
# deeplHeader = {"Authorization": f"Bearer {deeplx_api_token}"}
body = { "text": text,
"target_lang": target,
}
if source:
body["source_lang"] = source

try:
req = ProxyRequest(
deeplx_base_url, "POST", '', json.dumps(body), query_params={}
# deeplx_base_url, "POST", headers, json.dumps(body), query_params={}
)
resp = await pass_through_request(http_client, req, nohttps=True, noheaders=True)
resp = json.loads(resp.content.decode("utf-8"))
try:
# translated_text = resp["alternatives"][0]
translated_text = resp["data"]
# translated_text = translated_text.replace('\\n', '\n')
# print(translated_text)
res = {"data": {"translations": [{"translatedText": translated_text}]}}
except TypeError:
# res = {"error": {"message": "Failed to translate"}}
# res = {"data": {"translations": [{"translatedText": "Failed to translate"}]}}
logger.warn(f'Text failed to translate: {text}, DEBUG: {translated_text}')
res = {"data": {"translations": [{"translatedText": text}]}}

if not source:
res["data"]["translations"][0]["detectedSourceLanguage"] = resp["source_lang"].lower()

return json.dumps(res)
except Exception as e:
logger.error(f"DEEPLX error: {e}")


async def chat_completions(self, raycast_data: dict):
messages = self.__build_messages(raycast_data)
model = raycast_data["model"]
temperature = os.environ.get("TEMPERATURE", 0.5)


MODELS_DICT = {}
MODELS_AVAILABLE = []
DEFAULT_MODELS = {}

MODELS_TRANS_DICT = {}
MODELS_TRANS_AVAILABLE = []
DEFAULT_TRANS_MODELS = {}

if GeminiChatBot.is_start_available():
logger.info("Google API is available")
_bot = GeminiChatBot()
_models = _bot.get_models()
MODELS_AVAILABLE.extend(_models["models"])
DEFAULT_MODELS = _models["default_models"]
MODELS_DICT.update({model["model"]: _bot for model in _models["models"]})
elif OpenAIChatBot.is_start_available():
if OpenAIChatBot.is_start_available():
logger.info("OpenAI API is available")
_bot = OpenAIChatBot()
_models = _bot.get_models()
MODELS_AVAILABLE.extend(_models["models"])
DEFAULT_MODELS.update(_models["default_models"])
MODELS_DICT.update({model["model"]: _bot for model in _models["models"]})
elif AnthropicChatBot.is_start_available():
if AnthropicChatBot.is_start_available():
logger.info("Anthropic API is available")
_bot = AnthropicChatBot()
_models = _bot.get_models()
MODELS_AVAILABLE.extend(_models["models"])
DEFAULT_MODELS.update(_models["default_models"])
MODELS_DICT.update({model["model"]: _bot for model in _models["models"]})

if translationBot.is_start_available():
logger.info("DeepL API is available")
_bot = translationBot()
_models = _bot.get_models()
MODELS_TRANS_AVAILABLE.extend(_models["models"])
DEFAULT_TRANS_MODELS.update(_models["default_models"])
MODELS_TRANS_DICT.update({model["model"]: _bot for model in _models["models"]})


def get_bot(model_id):
if not model_id:
return next(iter(MODELS_DICT.values()))
return MODELS_DICT.get(model_id)

def get_trans_bot(model_trans_id):
if not model_trans_id:
        return next(iter(MODELS_TRANS_DICT.values()))
return MODELS_TRANS_DICT.get(model_trans_id)
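For reference, the mapping that translationBot._transDeeplx performs can be reproduced with plain httpx: POST the text and target_lang to the configured DeepLX endpoint, then reshape the reply ("data" for the translation, "source_lang" for detection) into the Raycast translations schema. The sketch below is self-contained and deliberately uses httpx directly instead of the repository's ProxyRequest/pass_through_request helpers; it assumes DEEPLX_BASE_URL points at the translate endpoint and ignores the optional DEEPLX_API_TOKEN.

import asyncio
import json
import os

import httpx


async def translate_via_deeplx(text: str, target: str, source: str | None = None) -> str:
    # build the DeepLX-style request body, mirroring translationBot._transDeeplx
    body = {"text": text, "target_lang": target}
    if source:
        body["source_lang"] = source

    async with httpx.AsyncClient(verify=False) as client:
        resp = await client.post(os.environ["DEEPLX_BASE_URL"], json=body)
    reply = resp.json()

    # reshape the reply into the Raycast translations schema used above
    result = {"data": {"translations": [{"translatedText": reply["data"]}]}}
    if not source:
        result["data"]["translations"][0]["detectedSourceLanguage"] = reply["source_lang"].lower()
    return json.dumps(result)


if __name__ == "__main__":
    # example: translate into Chinese and let DeepLX detect the source language
    print(asyncio.run(translate_via_deeplx("Hello, world", "zh")))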
