diff --git a/examples/quickstart.ipynb b/examples/quickstart.ipynb
index 996f77f3..cbca5440 100644
--- a/examples/quickstart.ipynb
+++ b/examples/quickstart.ipynb
@@ -229,7 +229,7 @@
     "\n",
     "import { TavilySearchResults } from \"@langchain/community/tools/tavily_search\";\n",
     "import { ChatOpenAI } from \"@langchain/openai\";\n",
-    "import { HumanMessage } from \"@langchain/core/messages\";\n",
+    "import { HumanMessage, AIMessage } from \"@langchain/core/messages\";\n",
     "import { ToolNode } from \"@langchain/langgraph/prebuilt\";\n",
     "import { StateGraph, MessagesAnnotation } from \"@langchain/langgraph\";\n",
     "\n",
@@ -245,10 +245,10 @@
     "\n",
     "// Define the function that determines whether to continue or not\n",
     "function shouldContinue({ messages }: typeof MessagesAnnotation.State) {\n",
-    "  const lastMessage = messages[messages.length - 1];\n",
+    "  const lastMessage = messages[messages.length - 1] as AIMessage;\n",
     "\n",
     "  // If the LLM makes a tool call, then we route to the \"tools\" node\n",
-    "  if (lastMessage.additional_kwargs.tool_calls) {\n",
+    "  if (lastMessage.tool_calls?.length) {\n",
     "    return \"tools\";\n",
     "  }\n",
     "  // Otherwise, we stop (reply to the user) using the special \"__end__\" node\n",
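
For context, a minimal sketch of the routing function as it reads once both hunks are applied. All identifiers come from the diff; the trailing `return "__end__"` sits outside the hunk and is reconstructed from the comment above it, so treat it as an assumption rather than a verbatim line from the notebook.

```ts
import { AIMessage } from "@langchain/core/messages";
import { MessagesAnnotation } from "@langchain/langgraph";

// Define the function that determines whether to continue or not
function shouldContinue({ messages }: typeof MessagesAnnotation.State) {
  // The last message is the model's reply; cast it so `tool_calls` is typed
  const lastMessage = messages[messages.length - 1] as AIMessage;

  // If the LLM makes a tool call, then we route to the "tools" node
  if (lastMessage.tool_calls?.length) {
    return "tools";
  }
  // Otherwise, we stop (reply to the user) using the special "__end__" node
  return "__end__";
}
```

Reading `tool_calls` directly off the typed `AIMessage` is more robust than peeking into `additional_kwargs`, since the parsed tool calls are populated consistently across chat-model providers.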