From b2bf17bd3c6255ce04a65624b0bbc2812027a319 Mon Sep 17 00:00:00 2001 From: Philip Date: Fri, 3 Jan 2025 12:42:10 -0500 Subject: [PATCH] Fixes for granite 3.1 --- .../1.0-basic-llm-prompt-granite3.ipynb | 24 ++++---- ...1.1-basic-llm-prompt-memory-granite3.ipynb | 18 +++--- .../02-tools/2.1-tools-calling-granite3.ipynb | 56 +++++++++++++------ .../02-tools/2.2-cot-prompting-granite3.ipynb | 13 +++-- .../2.3-react-prompting-granite3.ipynb | 19 ++++--- 5 files changed, 82 insertions(+), 48 deletions(-) diff --git a/lab-materials/01-llms/1.0-basic-llm-prompt-granite3.ipynb b/lab-materials/01-llms/1.0-basic-llm-prompt-granite3.ipynb index c5678b0..33f8efc 100644 --- a/lab-materials/01-llms/1.0-basic-llm-prompt-granite3.ipynb +++ b/lab-materials/01-llms/1.0-basic-llm-prompt-granite3.ipynb @@ -18,21 +18,23 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "id": "a16ed2e6", "metadata": { "tags": [] }, "outputs": [], "source": [ - "#!pip install -q langchain==0.1.9 openai==1.13.3\n", - "# !pip install langchain-openai\n", + "!pip install -q langchain openai\n", + "!pip install langchain-openai\n", + "!pip install \"langchain-core==0.3.27\"\n", + "!pip install langchain_community\n", "#!pip uninstall langchain openai" ] }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "id": "60bb3f0f-40b5-49a6-b493-5e361db0113e", "metadata": { "tags": [] @@ -53,7 +55,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "id": "7b908fd0-01dd-4ad2-b745-b3a4c56a7a7e", "metadata": { "tags": [] @@ -87,7 +89,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "id": "01baa2b8-529d-455d-ad39-ef4a96dbaf97", "metadata": { "tags": [] @@ -96,7 +98,7 @@ "source": [ "# LLM definition\n", "llm = ChatOpenAI(\n", - " openai_api_key=\"None\",\n", + " openai_api_key=API_KEY,\n", " openai_api_base= f\"{INFERENCE_SERVER_URL}/v1\",\n", " model_name=MODEL_NAME,\n", " top_p=0.92,\n", @@ -118,7 
+120,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "id": "fdd8bc4c-b353-4a51-a8b7-6cb348e19623", "metadata": { "tags": [] @@ -165,7 +167,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "id": "f875c62b-931a-402e-9494-98b7296f8b2b", "metadata": { "tags": [] @@ -197,7 +199,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "id": "ee09847d-9ff7-4181-ad11-96186bf0a322", "metadata": { "tags": [] @@ -236,7 +238,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "id": "60ed6c44-a016-49bd-8623-f231c5bba5b6", "metadata": { "tags": [] diff --git a/lab-materials/01-llms/1.1-basic-llm-prompt-memory-granite3.ipynb b/lab-materials/01-llms/1.1-basic-llm-prompt-memory-granite3.ipynb index 7ca59cd..b3c1c3e 100644 --- a/lab-materials/01-llms/1.1-basic-llm-prompt-memory-granite3.ipynb +++ b/lab-materials/01-llms/1.1-basic-llm-prompt-memory-granite3.ipynb @@ -18,19 +18,23 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "id": "a16ed2e6", "metadata": { "tags": [] }, "outputs": [], "source": [ - "#!pip install -q langchain==0.1.9 openai==1.13.3" + "!pip install -q langchain openai\n", + "!pip install langchain-openai\n", + "!pip install \"langchain-core==0.3.27\"\n", + "!pip install langchain_community\n", + "#!pip uninstall langchain openai" ] }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "id": "60bb3f0f-40b5-49a6-b493-5e361db0113e", "metadata": { "tags": [] @@ -48,7 +52,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "id": "7b908fd0-01dd-4ad2-b745-b3a4c56a7a7e", "metadata": { "tags": [] @@ -70,7 +74,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "id": "01baa2b8-529d-455d-ad39-ef4a96dbaf97", "metadata": { "tags": [] @@ -79,7 +83,7 @@ "source": [ "# LLM definition\n", "llm = ChatOpenAI(\n", - " openai_api_key=\"None\",\n", + " 
openai_api_key=API_KEY,\n", " openai_api_base= f\"{INFERENCE_SERVER_URL}/v1\",\n", " model_name=MODEL_NAME,\n", " top_p=0.92,\n", @@ -101,7 +105,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "id": "fdd8bc4c-b353-4a51-a8b7-6cb348e19623", "metadata": { "tags": [] diff --git a/lab-materials/02-tools/2.1-tools-calling-granite3.ipynb b/lab-materials/02-tools/2.1-tools-calling-granite3.ipynb index ce5c940..53e3400 100644 --- a/lab-materials/02-tools/2.1-tools-calling-granite3.ipynb +++ b/lab-materials/02-tools/2.1-tools-calling-granite3.ipynb @@ -46,19 +46,23 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "id": "a16ed2e6", "metadata": { "tags": [] }, "outputs": [], "source": [ - "#!pip install -q termcolor langchain_community duckduckgo_search wikipedia" + "!pip install -q langchain openai\n", + "!pip install langchain-openai\n", + "!pip install \"langchain-core==0.3.27\"\n", + "!pip install langchain_community\n", + "!pip install -q termcolor langchain_community duckduckgo_search wikipedia" ] }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "id": "60bb3f0f-40b5-49a6-b493-5e361db0113e", "metadata": { "tags": [] @@ -71,6 +75,7 @@ "from langchain.chains import ConversationChain\n", "from langchain.memory import ConversationBufferMemory\n", "from langchain.chains import LLMChain\n", + "from langchain_openai import ChatOpenAI\n", "from langchain_community.llms import VLLMOpenAI\n", "from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler\n", "from langchain.prompts import PromptTemplate" @@ -96,7 +101,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "id": "3d7998b5-4373-4b70-bd81-9617f7ff6bf1", "metadata": { "tags": [] @@ -133,7 +138,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "id": "ee09847d-9ff7-4181-ad11-96186bf0a322", "metadata": { "tags": [] @@ -203,7 +208,7 @@ }, { "cell_type": 
"code", - "execution_count": 6, + "execution_count": null, "id": "01baa2b8-529d-455d-ad39-ef4a96dbaf97", "metadata": { "tags": [] @@ -212,7 +217,7 @@ "source": [ "# LLM definition\n", "llm = ChatOpenAI(\n", - " openai_api_key=\"None\",\n", + " openai_api_key=API_KEY,\n", " openai_api_base= f\"{INFERENCE_SERVER_URL}/v1\",\n", " model_name=MODEL_NAME,\n", " top_p=0.92,\n", @@ -243,7 +248,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "id": "fdd8bc4c-b353-4a51-a8b7-6cb348e19623", "metadata": { "tags": [] @@ -252,15 +257,34 @@ "source": [ "template = \"\"\"\\\n", "<|start_of_role|>system<|end_of_role|>\n", - "You are a helpful, respectful, and honest assistant. Always be as helpful as possible, while being safe. \n", - "Your answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. \n", - "Please ensure that your responses are socially unbiased and positive in nature.\n", + "You are a helpful, respectful, and honest assistant. Always be as helpful as possible, while being safe. Your answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure that your responses are socially unbiased and positive in nature.\n", + "\n", + "If a question does not make sense or is not factually coherent, explain why instead of providing incorrect information. If you don't know the answer to a question, do not share false information.\n", + "\n", + "---\n", + "\n", + "You have access to several tools to assist you in completing tasks. You are responsible for determining when to use them and should break down complex tasks into subtasks if necessary. 
\n", + "\n", + "When using tools, follow this format:\n", + "\n", + "{{\n", + " \"action\": \"Specify the tool you want to use.\",\n", + " \"action_input\": {{ \n", + " \"key\": \"Value inputs to the tool in valid JSON format.\"\n", + " }}\n", + "}}\n", + "\n", + "You must always ensure the inputs are correct for the tool you call. Provide all output in JSON format.\n", + "\n", + "Be sure the output is in JSON format, nothing else. Do not repeat the JSON again.\n", + "\n", + "The available tools include:\n", + "\n", + "{tools_description}\n", + "\n", - "If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrect. \n", - "If you don't know the answer to a question, please don't share false information.\n", "<|end_of_text|>\n", "\n", - "<|start_of_role|>user<|end_of_role|>{history}\n", + "<|start_of_role|>user<|end_of_role|>\n", + "Human: {input}\n", "<|end_of_text|>\n", "\n", @@ -428,7 +452,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": null, "id": "fb494726-27f2-417a-a7fe-386a823a791d", "metadata": { "tags": [] @@ -454,7 +478,7 @@ " if tool.name == action:\n", " print(f\"--> Executing tool: {tool.name}\")\n", " try:\n", - " query = action_input['query']['description']\n", + " query = action_input['query']\n", " print(f\"----> Executing DuckDuckGo search for: {query}\")\n", "\n", " result = tool.invoke(query)\n", diff --git a/lab-materials/02-tools/2.2-cot-prompting-granite3.ipynb b/lab-materials/02-tools/2.2-cot-prompting-granite3.ipynb index 0b51624..7e90e2a 100644 --- a/lab-materials/02-tools/2.2-cot-prompting-granite3.ipynb +++ b/lab-materials/02-tools/2.2-cot-prompting-granite3.ipynb @@ -45,7 +45,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "id": "a16ed2e6", "metadata": { "tags": [] @@ -62,9 +62,10 @@ } ], "source": [ - "#!pip install -q langchain==0.1.9 openai==1.13.3\n", - "!pip install -q langchain-openai langchain\n", - "#!pip 
uninstall langchain openai" + "!pip install -q langchain openai\n", + "!pip install langchain-openai\n", + "!pip install \"langchain-core==0.3.27\"\n", + "!pip install langchain_community" ] }, { @@ -172,7 +173,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "id": "f875c62b-931a-402e-9494-98b7296f8b2b", "metadata": { "tags": [] @@ -189,7 +190,7 @@ ], "source": [ "conversation = LLMChain(llm=llm,\n", - " prompt=PROMPT_LLM,\n", + " prompt=PROMPT,\n", " verbose=False,\n", " )" ] diff --git a/lab-materials/02-tools/2.3-react-prompting-granite3.ipynb b/lab-materials/02-tools/2.3-react-prompting-granite3.ipynb index 66fcf46..c7721aa 100644 --- a/lab-materials/02-tools/2.3-react-prompting-granite3.ipynb +++ b/lab-materials/02-tools/2.3-react-prompting-granite3.ipynb @@ -38,15 +38,18 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "id": "a16ed2e6", "metadata": { "tags": [] }, "outputs": [], "source": [ - "#!pip install -q termcolor langchain_community duckduckgo_search wikipedia langchain langchain_experimental\n", - "#!pip install langchain_openai langchain_experimental" + "!pip install -q termcolor duckduckgo_search wikipedia \n", + "!pip install -q langchain openai\n", + "!pip install langchain-openai\n", + "!pip install \"langchain-core==0.3.27\"\n", + "!pip install langchain_community langchain_experimental" ] }, { @@ -81,7 +84,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "id": "3d7998b5-4373-4b70-bd81-9617f7ff6bf1", "metadata": { "tags": [] @@ -116,7 +119,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "id": "f26f1ce6-8db0-4f6c-9622-446617327ccf", "metadata": { "tags": [] @@ -141,7 +144,7 @@ } ], "source": [ - "repl.run(\"pip\")" + "repl.run(\"print('hello world') \")" ] }, { @@ -226,7 +229,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "id": "01baa2b8-529d-455d-ad39-ef4a96dbaf97", "metadata": { 
"tags": [] @@ -235,7 +238,7 @@ "source": [ "# LLM definition\n", "llm = ChatOpenAI(\n", - " openai_api_key=\"None\",\n", + " openai_api_key=API_KEY,\n", " openai_api_base= f\"{INFERENCE_SERVER_URL}/v1\",\n", " model_name=MODEL_NAME,\n", " top_p=0.92,\n",