Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Gml 1721 emit agent steps #210

Merged
merged 17 commits into from
Jun 20, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 13 additions & 3 deletions common/py_schemas/schemas.py
Original file line number Diff line number Diff line change
Expand Up @@ -126,9 +126,9 @@ class QueryUpsertRequest(BaseModel):
query_info: Optional[GSQLQueryInfo]


class Role(enum.Enum):
system = enum.auto()
user = enum.auto()
class Role(enum.StrEnum):
SYSTEM = enum.auto()
USER = enum.auto()


class Message(BaseModel):
Expand All @@ -144,3 +144,13 @@ class Message(BaseModel):
response_time: Optional[float] = None # time in fractional seconds
feedback: Optional[int] = None
comment: Optional[str] = None


class ResponseType(enum.StrEnum):
PROGRESS = enum.auto()
MESSAGE = enum.auto()


class AgentProgess(BaseModel):
    """A single agent update paired with its ResponseType.

    NOTE(review): class name is misspelled — should be "AgentProgress".
    Renaming would break importers, so only flagging it here.
    """

    # Text of the update (presumably rendered by the chat frontend,
    # which styles response_type == "progress" differently — confirm
    # against CustomChatMessage.tsx).
    content: str
    response_type: ResponseType
18 changes: 12 additions & 6 deletions copilot-ui/src/actions/ActionProvider.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import {createClientMessage} from 'react-chatbot-kit';
import useWebSocket, {ReadyState} from 'react-use-websocket';
import Loader from '../components/Loader';

const WS_URL = 'ws://0.0.0.0:8000/ui/Demo_Graph1/chat';
const WS_URL = "ws://0.0.0.0:8000/ui/Demo_Graph1/chat";

interface ActionProviderProps {
createChatBotMessage: any;
Expand All @@ -30,10 +30,16 @@ export interface Message {
comment: string;
}

const ActionProvider: React.FC<ActionProviderProps> = ({createChatBotMessage, setState, children}) => {
const ActionProvider: React.FC<ActionProviderProps> = ({
createChatBotMessage,
setState,
children,
}) => {
const [socketUrl, setSocketUrl] = useState(WS_URL);
const [messageHistory, setMessageHistory] = useState<MessageEvent<Message>[]>([]);
const {sendMessage, lastMessage, readyState} = useWebSocket(socketUrl);
const [messageHistory, setMessageHistory] = useState<MessageEvent<Message>[]>(
[],
);
const { sendMessage, lastMessage, readyState } = useWebSocket(socketUrl);

// eslint-disable-next-line
// @ts-ignore
Expand All @@ -43,8 +49,8 @@ const ActionProvider: React.FC<ActionProviderProps> = ({createChatBotMessage, se

useWebSocket(WS_URL, {
onOpen: () => {
queryCopilotWs2('dXNlcl8yOlRoaXNpc3RoZWFkbWluITE=');
console.log('WebSocket connection established.');
queryCopilotWs2(localStorage.getItem("creds")!);
console.log("WebSocket connection established.");
},
});

Expand Down
6 changes: 5 additions & 1 deletion copilot-ui/src/components/CustomChatMessage.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,11 @@ export const CustomChatMessage: FC<IChatbotMessageProps> = ({
message
) : (
<div className="text-sm max-w-[230px] md:max-w-[80%] mt-7 mb-7">
<p className="typewriter">{message.content}</p>
{message.response_type === "progress" ? (
<p className="copilot-thinking typewriter">{message.content}</p>
) : (
<p className="typewriter">{message.content}</p>
)}
<div className="flex mt-3">
<div
className="w-[28px] h-[28px] bg-shadeA flex items-center justify-center rounded-sm mr-1 cursor-pointer"
Expand Down
2 changes: 2 additions & 0 deletions copilot-ui/src/components/Start.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,8 @@ const questions = [
{ title: "How TigerGraph can help me?" },
{ title: "How to use visualization correctly?" },
{ title: "How to detect fraud in transactions?" },
// { title: "What is William Torres' ID?" },
// { title: "What's his email?" },
];

interface Start {
Expand Down
25 changes: 25 additions & 0 deletions copilot-ui/src/index.css
Original file line number Diff line number Diff line change
Expand Up @@ -467,6 +467,31 @@
top: 30px;
}
}
/* Animated "thinking" ellipsis appended after a progress message.
   The trailing dots cycle " " -> " ." -> " .." -> " ..." once per second. */
.copilot-thinking::after {
overflow: hidden;
display: inline-block;
vertical-align: bottom;
/* content changes are discrete, so the steps() timing just gates when
   each keyframe's content swap becomes visible */
animation: ellipsis steps(1, end) 1s infinite;
content: " ";
}
/* Four-phase dot cycle driven by the animation above. */
@keyframes ellipsis {
0% {
content: " ";
}
25% {
content: " .";
}
50% {
content: " ..";
}
75% {
content: " ...";
}
100% {
content: " ";
}
}

/* DEMO-SPECIFIC STYLES */
.type-writer {
Expand Down
25 changes: 25 additions & 0 deletions copilot/app/agent/Q.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
from threading import Lock


DONE = "DONE"


class Q:
    """Minimal thread-safe FIFO queue.

    Intended to be shared between a producer and a consumer thread
    (items are appended with ``put`` and drained with ``pop``).
    ``pop`` returns ``None`` when the queue is empty.
    """

    def __init__(self):
        # Backing store; every access must hold self.l.
        self.q = []
        self.l = Lock()

    def put(self, item):
        """Append *item* to the tail of the queue."""
        with self.l:
            self.q.append(item)

    def pop(self):
        """Remove and return the head item, or None if the queue is empty.

        The emptiness check is performed under the lock: the original
        checked ``len(self.q)`` before acquiring it, which raced with a
        concurrent pop (both could pass the check for a single item).
        Using ``list.pop(0)`` also avoids re-copying the whole list
        (the original did ``self.q = self.q[1:]``).
        """
        with self.l:
            if self.q:
                return self.q.pop(0)
            return None

    def clear(self):
        """Discard all queued items (now done under the lock — the
        original cleared without synchronization)."""
        with self.l:
            self.q.clear()
59 changes: 36 additions & 23 deletions copilot/app/agent/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,23 @@
from typing import Dict, List

from agent.agent_graph import TigerGraphAgentGraph
from agent.Q import Q
from fastapi import WebSocket
from tools import GenerateCypher, GenerateFunction, MapQuestionToSchema

from common.config import embedding_service, embedding_store, llm_config
from common.embeddings.base_embedding_store import EmbeddingStore
from common.embeddings.embedding_services import EmbeddingModel
from common.llm_services import (AWS_SageMaker_Endpoint, AWSBedrock,
AzureOpenAI, GoogleVertexAI, Groq,
HuggingFaceEndpoint, Ollama, OpenAI)
from common.llm_services import (
AWS_SageMaker_Endpoint,
AWSBedrock,
AzureOpenAI,
GoogleVertexAI,
Groq,
HuggingFaceEndpoint,
Ollama,
OpenAI,
)
from common.llm_services.base_llm import LLM_Model
from common.logs.log import req_id_cv
from common.logs.logwriter import LogWriter
Expand Down Expand Up @@ -43,6 +52,7 @@ def __init__(
embedding_model: EmbeddingModel,
embedding_store: EmbeddingStore,
use_cypher: bool = False,
ws=None,
):
self.conn = db_connection

Expand All @@ -62,26 +72,23 @@ def __init__(
embedding_store,
)

self.cypher_tool = None
if use_cypher:
self.cypher_tool = GenerateCypher(self.conn, self.llm)
self.agent = TigerGraphAgentGraph(
self.llm,
self.conn,
self.embedding_model,
self.embedding_store,
self.mq2s,
self.gen_func,
self.cypher_tool,
).create_graph()
else:
self.agent = TigerGraphAgentGraph(
self.llm,
self.conn,
self.embedding_model,
self.embedding_store,
self.mq2s,
self.gen_func,
).create_graph()

if ws is not None:
self.q = Q()

self.agent = TigerGraphAgentGraph(
self.llm,
self.conn,
self.embedding_model,
self.embedding_store,
self.mq2s,
self.gen_func,
cypher_gen_tool=self.cypher_tool,
q=self.q,
).create_graph()

logger.debug(f"request_id={req_id_cv.get()} agent initialized")

Expand Down Expand Up @@ -141,7 +148,7 @@ def question_for_agent(
)


def make_agent(graphname, conn, use_cypher) -> TigerGraphAgent:
def make_agent(graphname, conn, use_cypher, ws: WebSocket = None) -> TigerGraphAgent:
if llm_config["completion_service"]["llm_service"].lower() == "openai":
llm_service_name = "openai"
print(llm_config["completion_service"])
Expand Down Expand Up @@ -176,7 +183,13 @@ def make_agent(graphname, conn, use_cypher) -> TigerGraphAgent:
logger.debug(
f"/{graphname}/query_with_history request_id={req_id_cv.get()} llm_service={llm_service_name} agent created"
)

agent = TigerGraphAgent(
llm_provider, conn, embedding_service, embedding_store, use_cypher=use_cypher
llm_provider,
conn,
embedding_service,
embedding_store,
use_cypher=use_cypher,
ws=ws,
)
return agent
Loading
Loading