-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Enable Langserve chat server and playground (#17)
* Enable Langserve * Use relative path * update readme * Use direct imports
- Loading branch information
1 parent
4687718
commit 6e1325c
Showing
9 changed files
with
131 additions
and
33 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,34 @@ | ||
[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"

[project]
name = "llama4u"
version = "0.1"
authors = [
    # NOTE(review): the email was redacted as "[email protected]" in the scraped
    # source — restore the real address from the repository.
    {name = "Viraj Malia", email="[email protected]"}
]
description = '''
Llama4U is a privacy-focused AI assistant developed using Ollama, LangChain and Llama3.
'''
readme = "README.md"
requires-python = ">=3.7"
classifiers = [
    "Programming Language :: Python :: 3",
    # Fixed: "License :: MIT License" is not a valid Trove classifier; PyPI
    # rejects uploads with unknown classifiers. The canonical form is below.
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
]
dependencies = [
    "langchain",
    "langchain-core",
    "langchain-community",
    "langchain-chroma",
    "termcolor"
]

[project.scripts]
llama4u = "llama4u:main"

[project.urls]
"Homepage" = "https://github.com/virajmalia/llama4u"
"Bug Tracker" = "https://github.com/virajmalia/llama4u/issues"
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,66 @@ | ||
#!/usr/bin/env python | ||
""" Llama4U Server """ | ||
from typing import List, Union | ||
from fastapi import FastAPI | ||
from fastapi.middleware.cors import CORSMiddleware | ||
from langchain.pydantic_v1 import Field | ||
from langchain_core.runnables import RunnableLambda | ||
from langchain_core.messages import HumanMessage, AIMessage | ||
from langserve import CustomUserType | ||
from langserve.server import add_routes | ||
from app.src.llama4u import Llama4U | ||
|
||
# FastAPI application that exposes the Llama4U chain through LangServe.
app = FastAPI(
    title="LangChain Server",
    version="1.0",
    description="Spin up a simple api server using Langchain's Runnable interfaces",
)

# Fully permissive CORS so browser clients (e.g. the LangServe playground)
# on any origin can reach the API.
_cors_settings = dict(
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
    expose_headers=["*"],
)
app.add_middleware(CORSMiddleware, **_cors_settings)
|
||
class ChatHistory(CustomUserType):
    """Request schema rendered as a chat widget in the LangServe playground."""

    # The widget metadata instructs the playground UI to present this field
    # as an interactive chat box whose transcript is submitted as `chat`.
    chat: List[Union[HumanMessage, AIMessage]] = Field(
        ...,
        extra={"widget": {"type": "chat", "input": "chat"}},
    )
|
||
def format_input(input_data: ChatHistory):
    """Translate the playground chat widget payload into the chain's input dict.

    HumanMessage entries become {"role": "user", ...} and AIMessage entries
    become {"role": "assistant", ...}; any other message type is silently
    skipped. Returns {"input": <list of role/content dicts>}.
    """
    role_table = ((HumanMessage, "user"), (AIMessage, "assistant"))

    msg_history = []
    for message in input_data.chat:
        for msg_cls, role in role_table:
            if isinstance(message, msg_cls):
                msg_history.append({"role": role, "content": message.content})
                break

    return {"input": msg_history}
|
||
def format_output(response_data):
    """Reduce the chain's response message to its plain-text content."""
    text = response_data.content
    return text
|
||
# Build the serving chain — playground input formatting feeding the Llama4U
# model (with message history) — and mount it on the app under /llama4u.
llama4u = Llama4U()
input_formatter = RunnableLambda(format_input)
chat_model = input_formatter | llama4u.with_msg_history

add_routes(
    app,
    chat_model.with_types(input_type=ChatHistory, output_type=ChatHistory),
    path="/llama4u",
    config_keys=["configurable"],
)
|
||
if __name__ == "__main__":
    # Development entry point: serve the app locally with uvicorn.
    import uvicorn

    uvicorn.run(app, host="localhost")
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,34 +1,23 @@ | ||
# NOTE(review): this span is a scraped diff rendering (`@@ -1,34 +1,23 @@`) that
# interleaves lines removed from the old setuptools-based pyproject with lines
# added for the new Poetry-based pyproject, without +/- markers. It is NOT a
# single valid TOML document (e.g. [build-system] appears twice, and old
# [project] metadata sits next to new [tool.poetry] metadata). Do not edit it
# as one file — recover the real pyproject.toml from the repository instead.
[build-system] | ||
requires = ["setuptools>=61.0"] | ||
build-backend = "setuptools.build_meta" | ||
|
||
[project] | ||
name = "llama4u" | ||
version = "0.1" | ||
authors = [ | ||
{name = "Viraj Malia", email="[email protected]"} | ||
] | ||
description = ''' | ||
Llama4U is a free AI solution that can be hosted locally, while providing online capabilities in a responsible and user-controllable way. | ||
''' | ||
[tool.poetry] | ||
name = "llama4u-server" | ||
version = "0.1.0" | ||
description = "" | ||
authors = ["Viraj Malia [email protected]"] | ||
readme = "README.md" | ||
requires-python = ">=3.7" | ||
classifiers = [ | ||
"Programming Language :: Python :: 3", | ||
# NOTE(review): "License :: MIT License" is not a valid Trove classifier —
# the canonical form is "License :: OSI Approved :: MIT License".
"License :: MIT License", | ||
"Operating System :: OS Independent", | ||
] | ||
dependencies = [ | ||
"langchain", | ||
"langchain-core", | ||
"langchain-community", | ||
"langchain-chroma", | ||
"termcolor" | ||
packages = [ | ||
{ include = "app" }, | ||
] | ||
|
||
[project.scripts] | ||
llama4u = "llama4u:main" | ||
[tool.poetry.dependencies] | ||
python = "^3.11" | ||
uvicorn = "^0.23.2" | ||
langserve = {extras = ["server"], version = ">=0.0.30"} | ||
pydantic = "<2" | ||
|
||
[project.urls] | ||
"Homepage" = "https://github.com/virajmalia/llama4u" | ||
"Bug Tracker" = "https://github.com/virajmalia/llama4u/issues" | ||
|
||
[tool.poetry.group.dev.dependencies] | ||
langchain-cli = ">=0.0.15" | ||
|
||
[build-system] | ||
requires = ["poetry-core"] | ||
build-backend = "poetry.core.masonry.api" |