Skip to content

Commit

Permalink
feat: add support of Serper and Bing search
Browse files Browse the repository at this point in the history
  • Loading branch information
feiskyer committed Apr 8, 2024
1 parent e8c7849 commit bf65072
Show file tree
Hide file tree
Showing 8 changed files with 135 additions and 57 deletions.
5 changes: 5 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,10 @@
# CHANGELOG

## v4.4.1

* Add support of Serper and Bing search
* Add search support for Claude models

## v4.4.0

* Add support for Anthropic Claude 3
Expand Down
16 changes: 13 additions & 3 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
"displayName": "ChatGPT Copilot",
"icon": "images/ai-logo.png",
"description": "An VS Code ChatGPT Copilot Extension",
"version": "4.4.0",
"version": "4.4.1",
"aiKey": "",
"repository": {
"url": "https://github.com/feiskyer/chatgpt-copilot"
Expand Down Expand Up @@ -523,7 +523,17 @@
"chatgpt.gpt3.googleCSEId": {
"type": "string",
"markdownDescription": "Google custom search ID.",
"order": 40
"order": 41
},
"chatgpt.gpt3.serperKey": {
"type": "string",
"markdownDescription": "API key of Serper search API.",
"order": 42
},
"chatgpt.gpt3.bingKey": {
"type": "string",
"markdownDescription": "API key of Bing search API.",
"order": 43
}
}
}
Expand Down Expand Up @@ -587,4 +597,4 @@
"resolutions": {
"clone-deep": "^4.0.1"
}
}
}
32 changes: 19 additions & 13 deletions src/anthropic.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,33 +20,39 @@ import { XMLAgentOutputParser } from "langchain/agents/xml/output_parser";
import { ChatPromptTemplate as ChatPromptTemplatePackage } from "langchain/prompts";
import { AgentStep } from "langchain/schema";
import { RunnableSequence } from "langchain/schema/runnable";
import { ChatMessageHistory } from "langchain/stores/message/in_memory";
import { GoogleCustomSearch, Tool } from "langchain/tools";
import { BingSerpAPI, GoogleCustomSearch, Serper, Tool } from "langchain/tools";
import { Calculator } from "langchain/tools/calculator";
import { renderTextDescription } from "langchain/tools/render";
import ChatGptViewProvider from "./chatgpt-view-provider";
import { ModelConfig } from "./model-config";

// initClaudeModel initializes the Claude model with the given parameters.
export async function initClaudeModel(viewProvider: ChatGptViewProvider, apiKey: string, apiBaseUrl: string, maxTokens: number, temperature: number, topP: number, googleCSEApiKey: string, googleCSEId: string, messageHistory: ChatMessageHistory) {
export async function initClaudeModel(viewProvider: ChatGptViewProvider, config: ModelConfig) {
const apiClaude = new ChatAnthropic({
topP: topP,
temperature: temperature,
topP: config.topP,
temperature: config.temperature,
modelName: viewProvider.model,
anthropicApiKey: apiKey,
anthropicApiUrl: apiBaseUrl,
anthropicApiKey: config.apiKey,
anthropicApiUrl: config.apiBaseUrl,
streaming: true,
maxTokens: maxTokens,
maxTokens: config.maxTokens,
}).bind({
stop: ["</tool_input>", "</final_answer>"],
});

let tools: Tool[] = [new Calculator()];
if (googleCSEApiKey != "" && googleCSEId != "") {
if (config.googleCSEApiKey != "" && config.googleCSEId != "") {
tools.push(new GoogleCustomSearch({
apiKey: googleCSEApiKey,
googleCSEId: googleCSEId,
apiKey: config.googleCSEApiKey,
googleCSEId: config.googleCSEId,
}));
}
if (config.serperKey != "") {
tools.push(new Serper(config.serperKey));
}
if (config.bingKey != "") {
tools.push(new BingSerpAPI(config.bingKey));
}

const systemContext = `You are ChatGPT helping the User with coding.
You are intelligent, helpful and an expert developer, who always gives the correct answer and only does what instructed.
Expand Down Expand Up @@ -85,7 +91,7 @@ Ensure the final answer is in the same language as the question, unless otherwis
try {
const steps = super.parse(text);
return steps;
} catch (error) {
} catch (error: any) {
if (error.message.includes("Could not parse LLM output")) {
const msg = error.message.replace("Could not parse LLM output:", "");
const agentFinish: AgentFinish = {
Expand Down Expand Up @@ -125,7 +131,7 @@ Ensure the final answer is in the same language as the question, unless otherwis
viewProvider.tools = tools;
viewProvider.chain = new RunnableWithMessageHistory({
runnable: agentExecutor,
getMessageHistory: (_sessionId) => messageHistory,
getMessageHistory: (_sessionId) => config.messageHistory,
inputMessagesKey: "input",
historyMessagesKey: "chat_history",
});
Expand Down
12 changes: 9 additions & 3 deletions src/chatgpt-view-provider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ import { ChatMessageHistory } from "langchain/stores/message/in_memory";
import { Tool } from "langchain/tools";
import * as vscode from "vscode";
import { initClaudeModel } from "./anthropic";
import { ModelConfig } from "./model-config";
import { chatGpt, initGptModel } from "./openai";
import { chatCompletion, initGptLegacyModel } from "./openai-legacy";

Expand Down Expand Up @@ -244,6 +245,8 @@ export default class ChatGptViewProvider implements vscode.WebviewViewProvider {
const topP = configuration.get("gpt3.top_p") as number;
const googleCSEApiKey = configuration.get("gpt3.googleCSEApiKey") as string;
const googleCSEId = configuration.get("gpt3.googleCSEId") as string;
const serperKey = configuration.get("gpt3.serperKey") as string;
const bingKey = configuration.get("gpt3.bingKey") as string;
let apiBaseUrl = configuration.get("gpt3.apiBaseUrl") as string;
if (!apiBaseUrl) {
if (this.isGpt35Model) {
Expand Down Expand Up @@ -301,12 +304,15 @@ export default class ChatGptViewProvider implements vscode.WebviewViewProvider {
}

this.memory = new ChatMessageHistory();
const modelConfig = new ModelConfig(
{ apiKey, apiBaseUrl, maxTokens, temperature, topP, organization, googleCSEApiKey, googleCSEId, serperKey, bingKey, messageHistory: this.memory },
);
if (this.isGpt35Model) {
await initGptModel(this, apiKey, apiBaseUrl, maxTokens, temperature, topP, organization, googleCSEApiKey, googleCSEId, this.memory);
await initGptModel(this, modelConfig);
} else if (this.isClaude) {
await initClaudeModel(this, apiKey, apiBaseUrl, maxTokens, temperature, topP, googleCSEApiKey, googleCSEId, this.memory);
await initClaudeModel(this, modelConfig);
} else {
initGptLegacyModel(this, apiBaseUrl, apiKey, maxTokens, temperature, topP, organization);
initGptLegacyModel(this, modelConfig);
}
}

Expand Down
2 changes: 2 additions & 0 deletions src/extension.ts
Original file line number Diff line number Diff line change
Expand Up @@ -110,6 +110,8 @@ export async function activate(context: vscode.ExtensionContext) {
e.affectsConfiguration("chatgpt.gpt3.temperature") ||
e.affectsConfiguration("chatgpt.gpt3.googleCSEId") ||
e.affectsConfiguration("chatgpt.gpt3.googleCSEApiKey") ||
e.affectsConfiguration("chatgpt.gpt3.serperKey") ||
e.affectsConfiguration("chatgpt.gpt3.bingKey") ||
e.affectsConfiguration("chatgpt.gpt3.top_p")
) {
provider.prepareConversation(true);
Expand Down
42 changes: 42 additions & 0 deletions src/model-config.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
/* eslint-disable eqeqeq */
/* eslint-disable @typescript-eslint/naming-convention */
/**
* @author Pengfei Ni
*
* @license
* Copyright (c) 2024 - Present, Pengfei Ni
*
* All rights reserved. Code licensed under the ISC license
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*/
import { ChatMessageHistory } from "langchain/stores/message/in_memory";

/** Named parameters accepted by the {@link ModelConfig} constructor. */
type ModelConfigOptions = {
  apiKey: string;
  apiBaseUrl: string;
  maxTokens: number;
  temperature: number;
  topP: number;
  organization: string;
  googleCSEApiKey: string;
  googleCSEId: string;
  serperKey: string;
  bingKey: string;
  messageHistory: ChatMessageHistory;
};

/**
 * Bundles every setting needed to initialize a chat model into a single
 * object: API credentials and endpoint, sampling parameters, the API keys
 * for the optional search tools (Google CSE, Serper, Bing), and the shared
 * in-memory conversation history.
 */
export class ModelConfig {
  apiKey: string;
  apiBaseUrl: string;
  maxTokens: number;
  temperature: number;
  topP: number;
  organization: string;
  googleCSEApiKey: string;
  googleCSEId: string;
  serperKey: string;
  bingKey: string;
  messageHistory: ChatMessageHistory;

  /**
   * Copies each supplied option verbatim onto the corresponding field.
   * No validation or defaulting is performed here; callers are expected
   * to pass fully resolved configuration values.
   */
  constructor(options: ModelConfigOptions) {
    this.apiKey = options.apiKey;
    this.apiBaseUrl = options.apiBaseUrl;
    this.maxTokens = options.maxTokens;
    this.temperature = options.temperature;
    this.topP = options.topP;
    this.organization = options.organization;
    this.googleCSEApiKey = options.googleCSEApiKey;
    this.googleCSEId = options.googleCSEId;
    this.serperKey = options.serperKey;
    this.bingKey = options.bingKey;
    this.messageHistory = options.messageHistory;
  }
}
31 changes: 16 additions & 15 deletions src/openai-legacy.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,37 +21,38 @@ import {
SystemMessagePromptTemplate
} from "langchain/prompts";
import ChatGptViewProvider, { logger } from "./chatgpt-view-provider";
import { ModelConfig } from "./model-config";

// initGptLegacyModel initializes the GPT legacy model.
export function initGptLegacyModel(viewProvider: ChatGptViewProvider, apiBaseUrl: string, apiKey: string, maxTokens: number, temperature: number, topP: number, organization: string) {
if (apiBaseUrl?.includes("azure")) {
const instanceName = apiBaseUrl.split(".")[0].split("//")[1];
const deployName = apiBaseUrl.split("/")[apiBaseUrl.split("/").length - 1];
export function initGptLegacyModel(viewProvider: ChatGptViewProvider, config: ModelConfig) {
if (config.apiBaseUrl?.includes("azure")) {
const instanceName = config.apiBaseUrl.split(".")[0].split("//")[1];
const deployName = config.apiBaseUrl.split("/")[config.apiBaseUrl.split("/").length - 1];
viewProvider.apiCompletion = new OpenAI({
modelName: viewProvider.model,
azureOpenAIApiKey: apiKey,
azureOpenAIApiKey: config.apiKey,
azureOpenAIApiInstanceName: instanceName,
azureOpenAIApiDeploymentName: deployName,
azureOpenAIApiCompletionsDeploymentName: deployName,
azureOpenAIApiVersion: "2024-02-01",
maxTokens: maxTokens,
maxTokens: config.maxTokens,
streaming: true,
temperature: temperature,
topP: topP,
temperature: config.temperature,
topP: config.topP,
});
} else {
// OpenAI
viewProvider.apiCompletion = new OpenAI({
openAIApiKey: apiKey,
openAIApiKey: config.apiKey,
modelName: viewProvider.model,
maxTokens: maxTokens,
maxTokens: config.maxTokens,
streaming: true,
temperature: temperature,
topP: topP,
temperature: config.temperature,
topP: config.topP,
configuration: {
apiKey: apiKey,
baseURL: apiBaseUrl,
organization: organization,
apiKey: config.apiKey,
baseURL: config.apiBaseUrl,
organization: config.organization,
},
});
}
Expand Down
52 changes: 29 additions & 23 deletions src/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,63 +21,69 @@ import {
MessagesPlaceholder,
SystemMessagePromptTemplate
} from "langchain/prompts";
import { ChatMessageHistory } from "langchain/stores/message/in_memory";
import { GoogleCustomSearch, Tool } from "langchain/tools";
import { BingSerpAPI, GoogleCustomSearch, Serper, Tool } from "langchain/tools";
import { Calculator } from "langchain/tools/calculator";
import { WebBrowser } from "langchain/tools/webbrowser";
import ChatGptViewProvider, { logger } from "./chatgpt-view-provider";
import { ModelConfig } from "./model-config";

// initGptModel initializes the GPT model.
export async function initGptModel(viewProvider: ChatGptViewProvider, apiKey: string, apiBaseUrl: string, maxTokens: number, temperature: number, topP: number, organization: string, googleCSEApiKey: string, googleCSEId: string, messageHistory: ChatMessageHistory) {
export async function initGptModel(viewProvider: ChatGptViewProvider, config: ModelConfig) {
let tools: Tool[] = [new Calculator()];
if (googleCSEApiKey != "" && googleCSEId != "") {
if (config.googleCSEApiKey != "" && config.googleCSEId != "") {
tools.push(new GoogleCustomSearch({
apiKey: googleCSEApiKey,
googleCSEId: googleCSEId,
apiKey: config.googleCSEApiKey,
googleCSEId: config.googleCSEId,
}));
}
if (config.serperKey != "") {
tools.push(new Serper(config.serperKey));
}
if (config.bingKey != "") {
tools.push(new BingSerpAPI(config.bingKey));
}

let embeddings = new OpenAIEmbeddings({
modelName: "text-embedding-ada-002",
openAIApiKey: apiKey,
openAIApiKey: config.apiKey,
});
// AzureOpenAI
if (apiBaseUrl?.includes("azure")) {
const instanceName = apiBaseUrl.split(".")[0].split("//")[1];
const deployName = apiBaseUrl.split("/")[apiBaseUrl.split("/").length - 1];
if (config.apiBaseUrl?.includes("azure")) {
const instanceName = config.apiBaseUrl.split(".")[0].split("//")[1];
const deployName = config.apiBaseUrl.split("/")[config.apiBaseUrl.split("/").length - 1];
embeddings = new OpenAIEmbeddings({
azureOpenAIApiEmbeddingsDeploymentName: "text-embedding-ada-002",
azureOpenAIApiKey: apiKey,
azureOpenAIApiKey: config.apiKey,
azureOpenAIApiInstanceName: instanceName,
azureOpenAIApiDeploymentName: deployName,
azureOpenAIApiCompletionsDeploymentName: deployName,
azureOpenAIApiVersion: "2024-02-01",
});
viewProvider.apiChat = new ChatOpenAI({
modelName: viewProvider.model,
azureOpenAIApiKey: apiKey,
azureOpenAIApiKey: config.apiKey,
azureOpenAIApiInstanceName: instanceName,
azureOpenAIApiDeploymentName: deployName,
azureOpenAIApiCompletionsDeploymentName: deployName,
azureOpenAIApiVersion: "2024-02-01",
maxTokens: maxTokens,
maxTokens: config.maxTokens,
streaming: true,
temperature: temperature,
topP: topP,
temperature: config.temperature,
topP: config.topP,
});
} else {
// OpenAI
viewProvider.apiChat = new ChatOpenAI({
openAIApiKey: apiKey,
openAIApiKey: config.apiKey,
modelName: viewProvider.model,
maxTokens: maxTokens,
maxTokens: config.maxTokens,
streaming: true,
temperature: temperature,
topP: topP,
temperature: config.temperature,
topP: config.topP,
configuration: {
apiKey: apiKey,
baseURL: apiBaseUrl,
organization: organization,
apiKey: config.apiKey,
baseURL: config.apiBaseUrl,
organization: config.organization,
},
});
}
Expand Down Expand Up @@ -109,7 +115,7 @@ where necessary. Respond in the same language as the query, unless otherwise spe
viewProvider.tools = tools;
viewProvider.chain = new RunnableWithMessageHistory({
runnable: agentExecutor,
getMessageHistory: (_sessionId) => messageHistory,
getMessageHistory: (_sessionId) => config.messageHistory,
inputMessagesKey: "input",
historyMessagesKey: "chat_history",
});
Expand Down

0 comments on commit bf65072

Please sign in to comment.