chore(autoedit): consistent use of the output channel logger #6472

Merged · 3 commits · Dec 27, 2024
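
This change replaces the old `autoeditsLogger` (which tagged messages with inconsistent, free-form categories such as 'Autoedits', 'AutoEdits', and 'Config') with the shared `autoeditsOutputChannelLogger`, passing the name of the calling operation (for example 'getModelResponse', 'getApiKey', 'createAutoeditsModelAdapter') as the first argument and promoting error paths from logDebug to logError. The logger implementation itself is not part of this diff; the sketch below is only an assumption about its shape, inferred from the call sites changed here — the class name, output-channel title, and message formatting are illustrative guesses.

    // Hypothetical sketch of vscode/src/autoedits/output-channel-logger.ts (not included in this diff).
    // Assumes a VS Code LogOutputChannel; the channel title and formatting are placeholders.
    import * as vscode from 'vscode'

    class AutoeditsOutputChannelLogger {
        private readonly channel = vscode.window.createOutputChannel('Cody Autoedit', { log: true })

        // The first argument names the calling operation, matching the convention applied in this PR.
        logDebug(operation: string, message: string, ...args: unknown[]): void {
            this.channel.debug(`${operation}: ${message}`, ...args)
        }

        logError(operation: string, message: string, ...args: unknown[]): void {
            this.channel.error(`${operation}: ${message}`, ...args)
        }
    }

    export const autoeditsOutputChannelLogger = new AutoeditsOutputChannelLogger()

A typical call site from the changed adapters then looks like autoeditsOutputChannelLogger.logError('getModelResponse', 'Error calling Cody Gateway:', error).
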
10 changes: 7 additions & 3 deletions vscode/src/autoedits/adapters/cody-gateway.ts
@@ -1,6 +1,6 @@
import { currentResolvedConfig, dotcomTokenToGatewayToken } from '@sourcegraph/cody-shared'

import { autoeditsLogger } from '../logger'
import { autoeditsOutputChannelLogger } from '../output-channel-logger'

import type { AutoeditModelOptions, AutoeditsModelAdapter } from './base'
import {
@@ -24,7 +24,11 @@ export class CodyGatewayAdapter implements AutoeditsModelAdapter {
}
return response.choices[0].text
} catch (error) {
autoeditsLogger.logDebug('AutoEdits', 'Error calling Cody Gateway:', error)
autoeditsOutputChannelLogger.logError(
'getModelResponse',
'Error calling Cody Gateway:',
error
)
throw error
}
}
@@ -33,7 +37,7 @@ export class CodyGatewayAdapter implements AutoeditsModelAdapter {
const resolvedConfig = await currentResolvedConfig()
const fastPathAccessToken = dotcomTokenToGatewayToken(resolvedConfig.auth.accessToken)
if (!fastPathAccessToken) {
autoeditsLogger.logError('Autoedits', 'FastPath access token is not available')
autoeditsOutputChannelLogger.logError('getApiKey', 'FastPath access token is not available')
throw new Error('FastPath access token is not available')
}
return fastPathAccessToken
7 changes: 5 additions & 2 deletions vscode/src/autoedits/adapters/create-adapter.ts
@@ -1,6 +1,6 @@
import type { AutoEditsModelConfig, ChatClient } from '@sourcegraph/cody-shared'

import { autoeditsLogger } from '../logger'
import { autoeditsOutputChannelLogger } from '../output-channel-logger'

import type { AutoeditsModelAdapter } from './base'
import { CodyGatewayAdapter } from './cody-gateway'
@@ -30,7 +30,10 @@ export function createAutoeditsModelAdapter({
? new SourcegraphChatAdapter(chatClient)
: new SourcegraphCompletionsAdapter()
default:
autoeditsLogger.logDebug('Config', `Provider ${providerName} not supported`)
autoeditsOutputChannelLogger.logDebug(
'createAutoeditsModelAdapter',
`Provider ${providerName} not supported`
)
throw new Error(`Provider ${providerName} not supported`)
}
}
13 changes: 10 additions & 3 deletions vscode/src/autoedits/adapters/fireworks.ts
@@ -1,5 +1,5 @@
import { autoeditsProviderConfig } from '../autoedits-config'
import { autoeditsLogger } from '../logger'
import { autoeditsOutputChannelLogger } from '../output-channel-logger'

import type { AutoeditModelOptions, AutoeditsModelAdapter } from './base'
import {
@@ -16,7 +16,10 @@ export class FireworksAdapter implements AutoeditsModelAdapter {
const apiKey = autoeditsProviderConfig.experimentalAutoeditsConfigOverride?.apiKey

if (!apiKey) {
autoeditsLogger.logError('Autoedits', 'No api key provided in the config override')
autoeditsOutputChannelLogger.logError(
'getModelResponse',
'No api key provided in the config override'
)
throw new Error('No api key provided in the config override')
}
const response = await getModelResponse(option.url, body, apiKey)
@@ -25,7 +28,11 @@
}
return response.choices[0].text
} catch (error) {
autoeditsLogger.logDebug('AutoEdits', 'Error calling Fireworks API:', error)
autoeditsOutputChannelLogger.logError(
'getModelResponse',
'Error calling Fireworks API:',
error
)
throw error
}
}
9 changes: 6 additions & 3 deletions vscode/src/autoedits/adapters/openai.ts
@@ -1,5 +1,5 @@
import { autoeditsProviderConfig } from '../autoedits-config'
import { autoeditsLogger } from '../logger'
import { autoeditsOutputChannelLogger } from '../output-channel-logger'

import type { AutoeditModelOptions, AutoeditsModelAdapter } from './base'
import { getModelResponse, getOpenaiCompatibleChatPrompt } from './utils'
@@ -10,7 +10,10 @@ export class OpenAIAdapter implements AutoeditsModelAdapter {
const apiKey = autoeditsProviderConfig.experimentalAutoeditsConfigOverride?.apiKey

if (!apiKey) {
autoeditsLogger.logError('Autoedits', 'No api key provided in the config override')
autoeditsOutputChannelLogger.logError(
'getModelResponse',
'No api key provided in the config override'
)
throw new Error('No api key provided in the config override')
}

@@ -32,7 +35,7 @@
)
return response.choices[0].message.content
} catch (error) {
autoeditsLogger.logDebug('AutoEdits', 'Error calling OpenAI API:', error)
autoeditsOutputChannelLogger.logError('getModelResponse', 'Error calling OpenAI API:', error)
throw error
}
}
8 changes: 6 additions & 2 deletions vscode/src/autoedits/adapters/sourcegraph-chat.ts
@@ -1,5 +1,5 @@
import type { ChatClient, Message } from '@sourcegraph/cody-shared'
import { autoeditsLogger } from '../logger'
import { autoeditsOutputChannelLogger } from '../output-channel-logger'
import type { AutoeditModelOptions, AutoeditsModelAdapter } from './base'
import { getMaxOutputTokensForAutoedits, getSourcegraphCompatibleChatPrompt } from './utils'

@@ -38,7 +38,11 @@ export class SourcegraphChatAdapter implements AutoeditsModelAdapter {
}
return accumulated
} catch (error) {
autoeditsLogger.logDebug('AutoEdits', 'Error calling Sourcegraph Chat:', error)
autoeditsOutputChannelLogger.logError(
'getModelResponse',
'Error calling Sourcegraph Chat:',
error
)
throw error
}
}
8 changes: 6 additions & 2 deletions vscode/src/autoedits/adapters/sourcegraph-completions.ts
@@ -5,7 +5,7 @@ import type {
ModelRefStr,
} from '@sourcegraph/cody-shared'
import { defaultCodeCompletionsClient } from '../../completions/default-client'
import { autoeditsLogger } from '../logger'
import { autoeditsOutputChannelLogger } from '../output-channel-logger'
import type { AutoeditModelOptions, AutoeditsModelAdapter } from './base'
import { getMaxOutputTokensForAutoedits, getSourcegraphCompatibleChatPrompt } from './utils'

@@ -48,7 +48,11 @@ export class SourcegraphCompletionsAdapter implements AutoeditsModelAdapter {
}
return accumulated
} catch (error) {
autoeditsLogger.logDebug('AutoEdits', 'Error calling Sourcegraph Completions:', error)
autoeditsOutputChannelLogger.logError(
'getModelResponse',
'Error calling Sourcegraph Completions:',
error
)
throw error
}
}
157 changes: 78 additions & 79 deletions vscode/src/autoedits/autoedits-provider.ts
@@ -14,11 +14,11 @@ import { ContextMixer } from '../completions/context/context-mixer'
import { DefaultContextStrategyFactory } from '../completions/context/context-strategy'
import { getCurrentDocContext } from '../completions/get-current-doc-context'

import type { AutoeditsModelAdapter } from './adapters/base'
import type { AutoeditsModelAdapter, AutoeditsPrompt } from './adapters/base'
import { createAutoeditsModelAdapter } from './adapters/create-adapter'
import { autoeditsProviderConfig } from './autoedits-config'
import { FilterPredictionBasedOnRecentEdits } from './filter-prediction-edits'
import { autoeditsLogger } from './logger'
import { autoeditsOutputChannelLogger } from './output-channel-logger'
import type { CodeToReplaceData } from './prompt/prompt-utils'
import { ShortTermPromptStrategy } from './prompt/short-term-diff-prompt-strategy'
import type { DecorationInfo } from './renderer/decorators/base'
@@ -59,20 +59,20 @@ export interface AutoeditsPrediction {
*/
export class AutoeditsProvider implements vscode.InlineCompletionItemProvider, vscode.Disposable {
private readonly disposables: vscode.Disposable[] = []
private readonly onSelectionChangeDebounced: DebouncedFunc<typeof this.onSelectionChange>
/** Keeps track of the last time the text was changed in the editor. */
private lastTextChangeTimeStamp: number | undefined
private readonly onSelectionChangeDebounced: DebouncedFunc<typeof this.onSelectionChange>
public readonly rendererManager: AutoEditsRendererManager
private readonly modelAdapter: AutoeditsModelAdapter

private readonly promptStrategy = new ShortTermPromptStrategy()
public readonly filterPrediction = new FilterPredictionBasedOnRecentEdits()
private readonly contextMixer = new ContextMixer({
strategyFactory: new DefaultContextStrategyFactory(Observable.of(AUTOEDITS_CONTEXT_STRATEGY)),
contextRankingStrategy: ContextRankingStrategy.TimeBased,
dataCollectionEnabled: false,
})

public readonly rendererManager: AutoEditsRendererManager
public readonly filterPrediction = new FilterPredictionBasedOnRecentEdits()

constructor(chatClient: ChatClient) {
this.modelAdapter = createAutoeditsModelAdapter({
providerName: autoeditsProviderConfig.provider,
@@ -135,9 +135,10 @@ export class AutoeditsProvider implements vscode.InlineCompletionItemProvider, v
public async provideInlineCompletionItems(
document: vscode.TextDocument,
position: vscode.Position,
context: vscode.InlineCompletionContext,
inlineCompletionContext: vscode.InlineCompletionContext,
token?: vscode.CancellationToken
): Promise<vscode.InlineCompletionItem[] | vscode.InlineCompletionList | null> {
const start = Date.now()
const controller = new AbortController()
const abortSignal = controller.signal
token?.onCancellationRequested(() => controller.abort())
@@ -154,39 +155,63 @@ export class AutoeditsProvider implements vscode.InlineCompletionItemProvider, v
maxSuffixLength: tokensToChars(autoeditsProviderConfig.tokenLimit.suffixTokens),
})

const autoeditResponse = await this.inferEdit({
const { context } = await this.contextMixer.getContext({
document,
position,
docContext,
maxChars: 32_000,
})
if (abortSignal.aborted) {
return null
}

const { codeToReplaceData, prompt } = this.promptStrategy.getPromptForModelType({
document,
position,
docContext,
abortSignal,
context,
tokenBudget: autoeditsProviderConfig.tokenLimit,
isChatModel: autoeditsProviderConfig.isChatModel,
})

if (abortSignal.aborted || !autoeditResponse) {
const initialPrediction = await this.getPrediction({
document,
position,
prompt,
codeToReplaceData,
})
if (abortSignal?.aborted || !initialPrediction) {
return null
}

let { prediction, codeToReplaceData } = autoeditResponse
autoeditsOutputChannelLogger.logDebug(
'provideInlineCompletionItems',
`========================== Response:\n${initialPrediction}\n` +
`========================== Time Taken: ${Date.now() - start}ms`
)

const prediction = shrinkPredictionUntilSuffix({
prediction: initialPrediction,
codeToReplaceData,
})

const { codeToRewrite } = codeToReplaceData
if (prediction === codeToRewrite) {
autoeditsOutputChannelLogger.logDebug('skip', 'prediction equals to code to rewrite')
return null
}

const shouldFilterPredictionBasedRecentEdits = this.filterPrediction.shouldFilterPrediction(
document.uri,
const shouldFilterPredictionBasedRecentEdits = this.filterPrediction.shouldFilterPrediction({
uri: document.uri,
prediction,
codeToRewrite
)
codeToRewrite,
})

if (shouldFilterPredictionBasedRecentEdits) {
autoeditsLogger.logDebug('Autoedits', 'Skipping autoedit - based on recent edits')
autoeditsOutputChannelLogger.logDebug('skip', 'based on recent edits')
return null
}

prediction = shrinkPredictionUntilSuffix(prediction, codeToReplaceData)
if (prediction === codeToRewrite) {
autoeditsLogger.logDebug(
'Autoedits',
'Skipping autoedit - prediction equals to code to rewrite'
)
return null
}
const decorationInfo = getDecorationInfoFromPrediction(document, prediction, codeToReplaceData)

if (
@@ -196,10 +221,7 @@ export class AutoeditsProvider implements vscode.InlineCompletionItemProvider, v
suffix: codeToReplaceData.suffixInArea + codeToReplaceData.suffixAfterArea,
})
) {
autoeditsLogger.logDebug(
'Autoedits',
'Skipping autoedit - predicted text already exists in suffix'
)
autoeditsOutputChannelLogger.logDebug('skip', 'prediction equals to code to rewrite')
return null
}

@@ -212,70 +234,47 @@ export class AutoeditsProvider implements vscode.InlineCompletionItemProvider, v
docContext,
decorationInfo,
})

return inlineCompletions
}

private async inferEdit(options: AutoEditsProviderOptions): Promise<AutoeditsPrediction | null> {
const start = Date.now()
const { document, position, docContext, abortSignal } = options

const { context } = await this.contextMixer.getContext({
document,
position,
docContext,
maxChars: 32_000,
})

const { codeToReplace, prompt } = this.promptStrategy.getPromptForModelType({
document,
position,
docContext,
context,
tokenBudget: autoeditsProviderConfig.tokenLimit,
isChatModel: autoeditsProviderConfig.isChatModel,
})

let response: string | undefined = undefined
private async getPrediction({
document,
position,
codeToReplaceData,
prompt,
}: {
document: vscode.TextDocument
position: vscode.Position
codeToReplaceData: CodeToReplaceData
prompt: AutoeditsPrompt
}): Promise<string | undefined> {
if (autoeditsProviderConfig.isMockResponseFromCurrentDocumentTemplateEnabled) {
const responseMetadata = extractAutoEditResponseFromCurrentDocumentCommentTemplate(
document,
position
)

if (responseMetadata) {
response = shrinkReplacerTextToCodeToReplaceRange(responseMetadata, codeToReplace)
const prediction = shrinkReplacerTextToCodeToReplaceRange(
responseMetadata,
codeToReplaceData
)

if (prediction) {
return prediction
}
}
}

if (response === undefined) {
response = await this.modelAdapter.getModelResponse({
url: autoeditsProviderConfig.url,
model: autoeditsProviderConfig.model,
prompt,
codeToRewrite: codeToReplace.codeToRewrite,
userId: (await currentResolvedConfig()).clientState.anonymousUserID,
isChatModel: autoeditsProviderConfig.isChatModel,
})
}

if (abortSignal?.aborted || !response) {
return null
}

autoeditsLogger.logDebug(
'Autoedits',
'========================== Response:\n',
response,
'\n',
'========================== Time Taken For LLM (Msec): ',
(Date.now() - start).toString(),
'\n'
)

return {
codeToReplaceData: codeToReplace,
prediction: response,
}
return this.modelAdapter.getModelResponse({
url: autoeditsProviderConfig.url,
model: autoeditsProviderConfig.model,
prompt,
codeToRewrite: codeToReplaceData.codeToRewrite,
userId: (await currentResolvedConfig()).clientState.anonymousUserID,
isChatModel: autoeditsProviderConfig.isChatModel,
})
}

public dispose(): void {