Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(auto-edit): add basic webview debug-panel #7304

Merged
merged 6 commits into from
Mar 5, 2025
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 12 additions & 0 deletions lib/shared/src/inferenceClient/misc.ts
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,18 @@ export type CompletionResponseWithMetaData = {
* extract metadata required for analytics in one place.
*/
response?: BrowserOrNodeResponse
/**
* Optional request headers sent to the model API
*/
requestHeaders?: Record<string, string>
/**
* URL used to make the request to the model API
*/
requestUrl?: string
/**
* Optional request body sent to the model API
*/
requestBody?: any
}
}

Expand Down
18 changes: 18 additions & 0 deletions vscode/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -271,6 +271,14 @@
"group": "Cody",
"icon": "$(feedback)"
},
{
"command": "cody.command.autoedit.open-debug-panel",
"title": "Debug Auto-Edit",
"category": "Cody",
"group": "Cody",
"icon": "$(debug)",
"enablement": "cody.activated"
},
{
"command": "cody.command.explain-output",
"title": "Ask Cody to Explain",
Expand Down Expand Up @@ -582,6 +590,12 @@
"command": "cody.command.autoedit-manual-trigger",
"title": "Autoedits Manual Trigger",
"enablement": "cody.activated && config.cody.suggestions.mode == 'auto-edit (Experimental)'"
},
{
"command": "cody.command.autoedit.open-debug-panel",
"category": "Cody",
"title": "Debug Auto-Edit",
"enablement": "cody.activated"
}
],
"keybindings": [
Expand Down Expand Up @@ -721,6 +735,10 @@
],
"menus": {
"commandPalette": [
{
"command": "cody.command.autoedit.open-debug-panel",
"when": "cody.activated"
},
{
"command": "cody.command.edit-code",
"when": "cody.activated && editorIsOpen"
Expand Down
25 changes: 23 additions & 2 deletions vscode/src/autoedits/adapters/base.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,28 @@
import type { PromptString } from '@sourcegraph/cody-shared'
import type { CodeCompletionsParams, PromptString } from '@sourcegraph/cody-shared'
import type { AutoeditsRequestBody } from './utils'

/**
 * Result of a single auto-edit model request, bundling the prediction with the
 * request/response metadata needed by the debug panel and analytics logger.
 */
export interface ModelResponse {
    /** The text predicted by the model (the suggested edit). */
    prediction: string
    /** URL used to make the request to the model API */
    requestUrl: string
    /** Response headers received from the model API */
    responseHeaders: Record<string, string>
    /** Optional request headers sent to the model API */
    requestHeaders?: Record<string, string>
    /**
     * Optional request body sent to the model API
     * TODO: update to proper types from different adapters.
     */
    requestBody?: AutoeditsRequestBody | CodeCompletionsParams
    /**
     * Optional full response body received from the model API
     * This is propagated to the analytics logger for debugging purposes
     */
    responseBody?: any
}

export interface AutoeditsModelAdapter {
getModelResponse(args: AutoeditModelOptions): Promise<string>
getModelResponse(args: AutoeditModelOptions): Promise<ModelResponse>
}

/**
Expand Down
3 changes: 3 additions & 0 deletions vscode/src/autoedits/adapters/cody-gateway.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@ describe('CodyGatewayAdapter', () => {
// Mock successful response
mockFetch.mockResolvedValueOnce({
status: 200,
headers: new Headers(),
json: () => Promise.resolve({ choices: [{ message: { content: 'response' } }] }),
})

Expand Down Expand Up @@ -90,6 +91,7 @@ describe('CodyGatewayAdapter', () => {

mockFetch.mockResolvedValueOnce({
status: 200,
headers: new Headers(),
json: () => Promise.resolve({ choices: [{ text: 'response' }] }),
})

Expand All @@ -116,6 +118,7 @@ describe('CodyGatewayAdapter', () => {
it('handles error responses correctly', async () => {
mockFetch.mockResolvedValueOnce({
status: 400,
headers: new Headers(),
text: () => Promise.resolve('Bad Request'),
})

Expand Down
59 changes: 42 additions & 17 deletions vscode/src/autoedits/adapters/cody-gateway.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,27 +2,47 @@ import { currentResolvedConfig, dotcomTokenToGatewayToken } from '@sourcegraph/c

import { autoeditsOutputChannelLogger } from '../output-channel-logger'

import type { AutoeditModelOptions, AutoeditsModelAdapter } from './base'
import type { AutoeditModelOptions, AutoeditsModelAdapter, ModelResponse } from './base'
import {
type AutoeditsRequestBody,
type FireworksChatModelRequestParams,
type FireworksCompatibleRequestParams,
type FireworksCompletionModelRequestParams,
getMaxOutputTokensForAutoedits,
getModelResponse,
getOpenaiCompatibleChatPrompt,
} from './utils'

export class CodyGatewayAdapter implements AutoeditsModelAdapter {
public async getModelResponse(options: AutoeditModelOptions): Promise<string> {
public async getModelResponse(options: AutoeditModelOptions): Promise<ModelResponse> {
const headers = {
'X-Sourcegraph-Feature': 'code_completions',
}
const body = this.getMessageBody(options)
try {
const apiKey = await this.getApiKey()
const response = await getModelResponse(options.url, body, apiKey, headers)
const { data, requestHeaders, responseHeaders, url } = await getModelResponse(
options.url,
JSON.stringify(body),
apiKey,
headers
)

let prediction: string
if (options.isChatModel) {
return response.choices[0].message.content
prediction = data.choices[0].message.content
} else {
prediction = data.choices[0].text
}

return {
prediction,
responseHeaders,
requestHeaders,
requestBody: body,
requestUrl: url,
responseBody: data,
}
return response.choices[0].text
} catch (error) {
autoeditsOutputChannelLogger.logError('getModelResponse', 'Error calling Cody Gateway:', {
verbose: error,
Expand All @@ -46,9 +66,9 @@ export class CodyGatewayAdapter implements AutoeditsModelAdapter {
return fastPathAccessToken
}

private getMessageBody(options: AutoeditModelOptions): string {
private getMessageBody(options: AutoeditModelOptions): AutoeditsRequestBody {
const maxTokens = getMaxOutputTokensForAutoedits(options.codeToRewrite)
const body: FireworksCompatibleRequestParams = {
const baseBody: FireworksCompatibleRequestParams = {
stream: false,
model: options.model,
temperature: 0.1,
Expand All @@ -63,15 +83,20 @@ export class CodyGatewayAdapter implements AutoeditsModelAdapter {
rewrite_speculation: true,
user: options.userId || undefined,
}
const request = options.isChatModel
? {
...body,
messages: getOpenaiCompatibleChatPrompt({
systemMessage: options.prompt.systemMessage,
userMessage: options.prompt.userMessage,
}),
}
: { ...body, prompt: options.prompt.userMessage }
return JSON.stringify(request)

if (options.isChatModel) {
return {
...baseBody,
messages: getOpenaiCompatibleChatPrompt({
systemMessage: options.prompt.systemMessage,
userMessage: options.prompt.userMessage,
}),
} as FireworksChatModelRequestParams
}

return {
...baseBody,
prompt: options.prompt.userMessage,
} as FireworksCompletionModelRequestParams
}
}
9 changes: 7 additions & 2 deletions vscode/src/autoedits/adapters/fireworks.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ describe('FireworksAdapter', () => {
it('sends correct request parameters for chat model', async () => {
mockFetch.mockResolvedValueOnce({
status: 200,
headers: new Headers(),
json: () => Promise.resolve({ choices: [{ message: { content: 'response' } }] }),
})

Expand Down Expand Up @@ -82,6 +83,7 @@ describe('FireworksAdapter', () => {

mockFetch.mockResolvedValueOnce({
status: 200,
headers: new Headers(),
json: () => Promise.resolve({ choices: [{ text: 'response' }] }),
})

Expand All @@ -108,6 +110,7 @@ describe('FireworksAdapter', () => {
it('handles error responses correctly', async () => {
mockFetch.mockResolvedValueOnce({
status: 400,
headers: new Headers(),
text: () => Promise.resolve('Bad Request'),
})

Expand All @@ -118,11 +121,12 @@ describe('FireworksAdapter', () => {
const expectedResponse = 'modified code'
mockFetch.mockResolvedValueOnce({
status: 200,
headers: new Headers(),
json: () => Promise.resolve({ choices: [{ message: { content: expectedResponse } }] }),
})

const response = await adapter.getModelResponse(options)
expect(response).toBe(expectedResponse)
expect(response.prediction).toBe(expectedResponse)
})

it('returns correct response for completions model', async () => {
Expand All @@ -131,10 +135,11 @@ describe('FireworksAdapter', () => {

mockFetch.mockResolvedValueOnce({
status: 200,
headers: new Headers(),
json: () => Promise.resolve({ choices: [{ text: expectedResponse }] }),
})

const response = await adapter.getModelResponse(nonChatOptions)
expect(response).toBe(expectedResponse)
expect(response.prediction).toBe(expectedResponse)
})
})
24 changes: 19 additions & 5 deletions vscode/src/autoedits/adapters/fireworks.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import { autoeditsProviderConfig } from '../autoedits-config'
import { autoeditsOutputChannelLogger } from '../output-channel-logger'

import type { AutoeditModelOptions, AutoeditsModelAdapter } from './base'
import type { AutoeditModelOptions, AutoeditsModelAdapter, ModelResponse } from './base'
import {
type FireworksCompatibleRequestParams,
getMaxOutputTokensForAutoedits,
Expand All @@ -10,7 +10,7 @@ import {
} from './utils'

export class FireworksAdapter implements AutoeditsModelAdapter {
async getModelResponse(option: AutoeditModelOptions): Promise<string> {
async getModelResponse(option: AutoeditModelOptions): Promise<ModelResponse> {
const body = this.getMessageBody(option)
try {
const apiKey = autoeditsProviderConfig.experimentalAutoeditsConfigOverride?.apiKey
Expand All @@ -22,11 +22,25 @@ export class FireworksAdapter implements AutoeditsModelAdapter {
)
throw new Error('No api key provided in the config override')
}
const response = await getModelResponse(option.url, body, apiKey)
const { data, requestHeaders, responseHeaders, url } = await getModelResponse(
option.url,
body,
apiKey
)

let prediction: string
if (option.isChatModel) {
return response.choices[0].message.content
prediction = data.choices[0].message.content
} else {
prediction = data.choices[0].text
}

return {
prediction,
responseHeaders,
requestHeaders,
requestUrl: url,
}
return response.choices[0].text
} catch (error) {
autoeditsOutputChannelLogger.logError('getModelResponse', 'Error calling Fireworks API:', {
verbose: error,
Expand Down
14 changes: 10 additions & 4 deletions vscode/src/autoedits/adapters/openai.ts
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
import { autoeditsProviderConfig } from '../autoedits-config'
import { autoeditsOutputChannelLogger } from '../output-channel-logger'

import type { AutoeditModelOptions, AutoeditsModelAdapter } from './base'
import type { AutoeditModelOptions, AutoeditsModelAdapter, ModelResponse } from './base'
import { getModelResponse, getOpenaiCompatibleChatPrompt } from './utils'

export class OpenAIAdapter implements AutoeditsModelAdapter {
async getModelResponse(options: AutoeditModelOptions): Promise<string> {
async getModelResponse(options: AutoeditModelOptions): Promise<ModelResponse> {
try {
const apiKey = autoeditsProviderConfig.experimentalAutoeditsConfigOverride?.apiKey

Expand All @@ -17,7 +17,7 @@ export class OpenAIAdapter implements AutoeditsModelAdapter {
throw new Error('No api key provided in the config override')
}

const response = await getModelResponse(
const { data, requestHeaders, responseHeaders, url } = await getModelResponse(
options.url,
JSON.stringify({
model: options.model,
Expand All @@ -33,7 +33,13 @@ export class OpenAIAdapter implements AutoeditsModelAdapter {
}),
apiKey
)
return response.choices[0].message.content

return {
prediction: data.choices[0].message.content,
responseHeaders,
requestHeaders,
requestUrl: url,
}
} catch (error) {
autoeditsOutputChannelLogger.logError('getModelResponse', 'Error calling OpenAI API:', {
verbose: error,
Expand Down
2 changes: 1 addition & 1 deletion vscode/src/autoedits/adapters/sourcegraph-chat.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ describe('SourcegraphChatAdapter', () => {
mockChatClient.chat = mockChat

const response = await adapter.getModelResponse(options)
expect(response).toBe('part1part2')
expect(response.prediction).toBe('part1part2')
})

it('handles errors correctly', async () => {
Expand Down
12 changes: 9 additions & 3 deletions vscode/src/autoedits/adapters/sourcegraph-chat.ts
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
import type { ChatClient, Message } from '@sourcegraph/cody-shared'
import { autoeditsOutputChannelLogger } from '../output-channel-logger'
import type { AutoeditModelOptions, AutoeditsModelAdapter } from './base'
import type { AutoeditModelOptions, AutoeditsModelAdapter, ModelResponse } from './base'
import { getMaxOutputTokensForAutoedits, getSourcegraphCompatibleChatPrompt } from './utils'

export class SourcegraphChatAdapter implements AutoeditsModelAdapter {
constructor(private readonly chatClient: ChatClient) {}

async getModelResponse(option: AutoeditModelOptions): Promise<string> {
async getModelResponse(option: AutoeditModelOptions): Promise<ModelResponse> {
try {
const maxTokens = getMaxOutputTokensForAutoedits(option.codeToRewrite)
const messages: Message[] = getSourcegraphCompatibleChatPrompt({
Expand Down Expand Up @@ -36,7 +36,13 @@ export class SourcegraphChatAdapter implements AutoeditsModelAdapter {
break
}
}
return accumulated

// For direct API calls without HTTP headers, we return an empty object
return {
prediction: accumulated,
responseHeaders: {},
requestUrl: option.url,
}
} catch (error) {
autoeditsOutputChannelLogger.logError(
'getModelResponse',
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,7 @@ describe('SourcegraphCompletionsAdapter', () => {
adapter.client = { complete: mockComplete }

const response = await adapter.getModelResponse(options)
expect(response).toBe('part1part2')
expect(response.prediction).toBe('part1part2')
})

it('handles errors correctly', async () => {
Expand Down
Loading
Loading