Skip to content

Commit

Permalink
Merge pull request #3688 from continuedev/dallin/toolsapalooza-openai-gemini
Browse files Browse the repository at this point in the history

Open AI tools
  • Loading branch information
sestinj authored Jan 11, 2025
2 parents 0a18082 + 4a86e64 commit 7af1090
Show file tree
Hide file tree
Showing 16 changed files with 136 additions and 134 deletions.
9 changes: 6 additions & 3 deletions core/llm/constructMessages.ts
Original file line number Diff line number Diff line change
Expand Up @@ -93,18 +93,21 @@ export function constructMessages(
provider: string,
useTools: boolean,
): ChatMessage[] {
const filteredHistory = history.filter(
(item) => item.message.role !== "system",
);
const msgs: ChatMessage[] = [];

const systemMessage = constructSystemPrompt(model, provider, useTools);
if (systemMessage) {
msgs.push({
role: "system" as const,
role: "system",
content: systemMessage,
});
}

for (let i = 0; i < history.length; i++) {
const historyItem = history[i];
for (let i = 0; i < filteredHistory.length; i++) {
const historyItem = filteredHistory[i];

if (historyItem.message.role === "user") {
// Gather context items for user messages
Expand Down
6 changes: 1 addition & 5 deletions core/llm/countTokens.ts
Original file line number Diff line number Diff line change
Expand Up @@ -448,11 +448,7 @@ function compileChatMessages(
functionTokens + maxTokens + TOKEN_BUFFER_FOR_SAFETY,
);

if (
systemMessage &&
history.length >= 2 &&
history[history.length - 2].role === "system"
) {
if (history.length >= 2 && history[history.length - 2].role === "system") {
const movedSystemMessage = history.splice(-2, 1)[0];
history.unshift(movedSystemMessage);
}
Expand Down
7 changes: 5 additions & 2 deletions core/llm/llms/Ollama.ts
Original file line number Diff line number Diff line change
Expand Up @@ -365,15 +365,17 @@ class Ollama extends BaseLLM {
signal: AbortSignal,
options: CompletionOptions,
): AsyncGenerator<ChatMessage> {
const ollamaMessages = messages.map(this._convertToOllamaMessage);
const chatOptions: OllamaChatOptions = {
model: this._getModel(),
messages: messages.map(this._convertToOllamaMessage),
messages: ollamaMessages,
options: this._getModelFileParams(options),
keep_alive: options.keepAlive ?? 60 * 30, // 30 minutes
stream: options.stream,
// format: options.format, // Not currently in base completion options
};
if (options.tools?.length) {
// This logic is because tools can ONLY be included with user message for ollama
if (options.tools?.length && ollamaMessages.at(-1)?.role === "user") {
chatOptions.tools = options.tools.map((tool) => ({
type: "function",
function: {
Expand Down Expand Up @@ -447,6 +449,7 @@ class Ollama extends BaseLLM {
try {
const j = JSON.parse(chunk) as OllamaChatResponse;
const chatMessage = convertChatMessage(j);
yield chatMessage;
} catch (e) {
throw new Error(`Error parsing Ollama response: ${e} ${chunk}`);
}
Expand Down
15 changes: 7 additions & 8 deletions core/llm/llms/OpenAI.ts
Original file line number Diff line number Diff line change
Expand Up @@ -253,6 +253,13 @@ class OpenAI extends BaseLLM {
body.max_completion_tokens = undefined;
}

if (body.tools?.length) {
// To ensure schema adherence: https://platform.openai.com/docs/guides/function-calling#parallel-function-calling-and-structured-outputs
// In practice, setting this to true and asking for multiple tool calls
// leads to "arguments" being something like '{"file": "test.ts"}{"file": "test.js"}'
body.parallel_tool_calls = false;
}

return body;
}

Expand Down Expand Up @@ -309,14 +316,6 @@ class OpenAI extends BaseLLM {

const body = this._convertArgs(options, messages);

// Empty messages cause an error in LM Studio
body.messages = body.messages.map((m: any) => ({
...m,
content: m.content === "" ? " " : m.content,
// We call it toolCalls, they call it tool_calls
tool_calls: m.toolCalls,
tool_call_id: m.toolCallId,
})) as any;
const response = await this.fetch(this._getEndpoint("chat/completions"), {
method: "POST",
headers: this._getHeaders(),
Expand Down
90 changes: 51 additions & 39 deletions core/llm/openaiTypeConverters.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import { FimCreateParamsStreaming } from "@continuedev/openai-adapters/dist/apis/base";
import {
ChatCompletion,
ChatCompletionChunk,
Expand All @@ -6,7 +7,6 @@ import {
CompletionCreateParams,
} from "openai/resources/index";

import { FimCreateParamsStreaming } from "@continuedev/openai-adapters/dist/apis/base";
import { ChatMessage, CompletionOptions } from "..";

export function toChatMessage(
Expand All @@ -19,54 +19,56 @@ export function toChatMessage(
tool_call_id: message.toolCallId,
};
}

if (typeof message.content === "string") {
if (message.role === "system") {
return {
role: message.role,
role: "system",
content: message.content,
};
} else if (!message.content.some((item) => item.type !== "text")) {
// If no multi-media is in the message, just send as text
// for compatibility with OpenAI-"compatible" servers
// that don't support multi-media format
return {
...message,
content: message.content.map((item) => item.text).join(""),
};
}

const parts = message.content.map((part) => {
const msg: any = {
type: part.type,
text: part.text,
};
if (part.type === "imageUrl") {
msg.image_url = { ...part.imageUrl, detail: "auto" };
msg.type = "image_url";
}
return msg;
});

return {
...message,
content: parts,
let msg: ChatCompletionMessageParam = {
role: message.role,
content:
typeof message.content === "string"
? message.content || " " // LM Studio (and other providers) don't accept empty content
: !message.content.some((item) => item.type !== "text")
? // If no multi-media is in the message, just send as text
// for compatibility with OpenAI-"compatible" servers
// that don't support multi-media format
message.content.map((item) => item.text).join("") || " "
: message.content.map((part) => {
const msg: any = {
type: part.type,
text: part.text,
};
if (part.type === "imageUrl") {
msg.image_url = { ...part.imageUrl, detail: "auto" };
msg.type = "image_url";
}
return msg;
}),
};
if (
msg.role === "assistant" &&
message.role === "assistant" &&
message.toolCalls
) {
msg.tool_calls = message.toolCalls.map((toolCall) => ({
id: toolCall.id!,
type: toolCall.type!,
function: {
name: toolCall.function?.name!,
arguments: toolCall.function?.arguments!,
},
}));
}
return msg;
}

export function toChatBody(
messages: ChatMessage[],
options: CompletionOptions,
): ChatCompletionCreateParams {
const tools = options.tools?.map((tool) => ({
type: tool.type,
function: {
name: tool.function.name,
description: tool.function.description,
parameters: tool.function.parameters,
strict: tool.function.strict,
},
}));

const params: ChatCompletionCreateParams = {
messages: messages.map(toChatMessage),
model: options.model,
Expand All @@ -79,9 +81,19 @@ export function toChatBody(
stop: options.stop,
prediction: options.prediction,
};
if (tools?.length) {
params.tools = tools;

if (options.tools?.length) {
params.tools = options.tools.map((tool) => ({
type: tool.type,
function: {
name: tool.function.name,
description: tool.function.description,
parameters: tool.function.parameters,
strict: tool.function.strict,
},
}));
}

return params;
}

Expand Down
6 changes: 6 additions & 0 deletions core/llm/toolSupport.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,12 @@ export const PROVIDER_TOOL_SUPPORT: Record<
return true;
}
},
openai: (model) => {
// https://platform.openai.com/docs/guides/function-calling#models-supporting-function-calling
if (model.toLowerCase().startsWith("gpt-4")) {
return true;
}
},
// https://ollama.com/search?c=tools
ollama: (model) => {
if (
Expand Down
9 changes: 0 additions & 9 deletions core/util/paths.ts
Original file line number Diff line number Diff line change
Expand Up @@ -334,15 +334,6 @@ export function getLogsDirPath(): string {
return logsPath;
}

export function getLogFilePath(): string {
  // Absolute path of the global Continue log file.
  const target = path.join(getContinueGlobalPath(), "continue.log");
  // Create an empty log file on first access so callers can always open it.
  const alreadyExists = fs.existsSync(target);
  if (!alreadyExists) {
    fs.writeFileSync(target, "");
  }
  return target;
}

export function getCoreLogsPath(): string {
  // The core process writes its log to <logsDir>/core.log.
  const fileName = "core.log";
  return path.join(getLogsDirPath(), fileName);
}
Expand Down
2 changes: 1 addition & 1 deletion extensions/vscode/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
"name": "continue",
"icon": "media/icon.png",
"author": "Continue Dev, Inc",
"version": "0.9.250",
"version": "0.9.251",
"repository": {
"type": "git",
"url": "https://github.com/continuedev/continue"
Expand Down
9 changes: 2 additions & 7 deletions extensions/vscode/src/commands.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,7 @@ import { EXTENSION_NAME } from "core/control-plane/env";
import { Core } from "core/core";
import { walkDirAsync } from "core/indexing/walkDir";
import { GlobalContext } from "core/util/GlobalContext";
import {
getConfigJsonPath,
getDevDataFilePath,
getLogFilePath,
} from "core/util/paths";
import { getConfigJsonPath, getDevDataFilePath } from "core/util/paths";
import { Telemetry } from "core/util/posthog";
import readLastLines from "read-last-lines";
import * as vscode from "vscode";
Expand Down Expand Up @@ -651,8 +647,7 @@ const getCommandsMap: (
},
"continue.viewLogs": async () => {
captureCommandTelemetry("viewLogs");
const logFilePath = getLogFilePath();
await vscode.window.showTextDocument(vscode.Uri.file(logFilePath));
vscode.commands.executeCommand("workbench.action.toggleDevTools");
},
"continue.debugTerminal": async () => {
captureCommandTelemetry("debugTerminal");
Expand Down
1 change: 0 additions & 1 deletion extensions/vscode/src/extension/VsCodeMessenger.ts
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,6 @@ export class VsCodeMessenger {
});

this.onWebview("toggleDevTools", (msg) => {
vscode.commands.executeCommand("workbench.action.toggleDevTools");
vscode.commands.executeCommand("continue.viewLogs");
});
this.onWebview("reloadWindow", (msg) => {
Expand Down
54 changes: 24 additions & 30 deletions gui/src/components/StepContainer/ResponseActions.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@ export interface ResponseActionsProps {
index: number;
onDelete: () => void;
item: ChatHistoryItem;
shouldHideActions: boolean;
}

export default function ResponseActions({
Expand All @@ -23,7 +22,6 @@ export default function ResponseActions({
item,
isTruncated,
onDelete,
shouldHideActions,
}: ResponseActionsProps) {
const isInEditMode = useAppSelector(selectIsInEditMode);

Expand All @@ -33,37 +31,33 @@ export default function ResponseActions({

return (
<div className="mx-2 flex cursor-default items-center justify-end space-x-1 bg-transparent pb-0 text-xs text-gray-400">
{shouldHideActions || (
<>
{isTruncated && (
<HeaderButtonWithToolTip
tabIndex={-1}
text="Continue generation"
onClick={onContinueGeneration}
>
<BarsArrowDownIcon className="h-3.5 w-3.5 text-gray-500" />
</HeaderButtonWithToolTip>
)}
{isTruncated && (
<HeaderButtonWithToolTip
tabIndex={-1}
text="Continue generation"
onClick={onContinueGeneration}
>
<BarsArrowDownIcon className="h-3.5 w-3.5 text-gray-500" />
</HeaderButtonWithToolTip>
)}

<HeaderButtonWithToolTip
testId={`delete-button-${index}`}
text="Delete"
tabIndex={-1}
onClick={onDelete}
>
<TrashIcon className="h-3.5 w-3.5 text-gray-500" />
</HeaderButtonWithToolTip>
<HeaderButtonWithToolTip
testId={`delete-button-${index}`}
text="Delete"
tabIndex={-1}
onClick={onDelete}
>
<TrashIcon className="h-3.5 w-3.5 text-gray-500" />
</HeaderButtonWithToolTip>

<CopyIconButton
tabIndex={-1}
text={renderChatMessage(item.message)}
clipboardIconClassName="h-3.5 w-3.5 text-gray-500"
checkIconClassName="h-3.5 w-3.5 text-green-400"
/>
<CopyIconButton
tabIndex={-1}
text={renderChatMessage(item.message)}
clipboardIconClassName="h-3.5 w-3.5 text-gray-500"
checkIconClassName="h-3.5 w-3.5 text-green-400"
/>

<FeedbackButtons item={item} />
</>
)}
<FeedbackButtons item={item} />
</div>
);
}
Loading

0 comments on commit 7af1090

Please sign in to comment.