Path: blob/main/extensions/copilot/src/extension/chatSessionContext/vscode-node/chatSessionContextProvider.ts
13399 views
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as vscode from 'vscode';
import { ConfigKey, IConfigurationService } from '../../../platform/configuration/common/configurationService';
import { IGitService, RepoContext } from '../../../platform/git/common/gitService';
import { Copilot } from '../../../platform/inlineCompletions/common/api';
import { ILanguageContextProviderService, ProviderTarget } from '../../../platform/languageContextProvider/common/languageContextProviderService';
import { ILogService } from '../../../platform/log/common/logService';
import { IExperimentationService } from '../../../platform/telemetry/common/nullExperimentationService';
import { CancellationToken } from '../../../util/vs/base/common/cancellation';
import { Disposable, DisposableStore, IDisposable } from '../../../util/vs/base/common/lifecycle';
import { autorun, IObservable } from '../../../util/vs/base/common/observableInternal';
import { LanguageModelChatMessage, LanguageModelTextPart } from '../../../vscodeTypes';
import { IConversationStore } from '../../conversationStore/node/conversationStore';
import { Conversation } from '../../prompt/common/conversation';

/**
 * A cached (possibly still in-flight) conversation summary.
 *
 * The promise is cached immediately when summary generation starts, so
 * concurrent `resolve()` calls for the same conversation state share one
 * LLM request instead of issuing duplicates.
 */
interface SummaryCache {
	// Identifies the conversation state the summary was computed for
	// (sessionId + turn count — see ContextResolver.getCacheKey).
	readonly cacheKey: string;
	// Resolves to the summary text, or undefined if generation failed
	// or produced an empty result.
	readonly promise: Promise<string | undefined>;
}

// Per-turn cap on how much of an assistant response is included in the
// summarization prompt (characters; long responses keep head + tail).
const SINGLE_TURN_MESSAGE_LIMIT = 1_000;
// Overall cap on the conversation text sent to the summarizer (characters;
// oldest turns are dropped first).
const MAX_TOTAL_MESSAGE_LENGTH = 10_000;

/**
 * Contribution that registers a context provider which surfaces a short
 * LLM-generated summary of the user's current chat conversation ("what is
 * the user trying to do") as a context trait for NES and for the SCM
 * commit-input completions.
 *
 * Registration is gated behind an experiment-based configuration observable
 * and is torn down/re-created whenever that setting flips. Git branch
 * changes invalidate the cached summary and mark conversations that started
 * before the change as stale.
 */
export class ChatSessionContextContribution extends Disposable {

	private readonly _enableChatSessionContextProvider: IObservable<boolean>;
	// Timestamp (Date.now()) of the most recent observed branch change;
	// undefined until a change has been observed.
	private _branchChangeTime: number | undefined;
	// Last branch name seen by any repository watcher.
	// NOTE(review): this is a single field shared by the watchers of ALL open
	// repositories, so with multiple repos on different branches the watchers
	// can repeatedly overwrite each other — confirm multi-root behavior is
	// intended.
	private _lastBranchName: string | undefined;
	// Cached/in-flight summary for the current conversation state; cleared
	// on branch change.
	private _summaryCache: SummaryCache | undefined;

	constructor(
		@IConfigurationService configurationService: IConfigurationService,
		@ILogService private readonly logService: ILogService,
		@IExperimentationService experimentationService: IExperimentationService,
		@IGitService private readonly gitService: IGitService,
		@IConversationStore private readonly conversationStore: IConversationStore,
		@ILanguageContextProviderService private readonly languageContextProviderService: ILanguageContextProviderService,
	) {
		super();
		this._enableChatSessionContextProvider = configurationService.getExperimentBasedConfigObservable(ConfigKey.Advanced.ChatSessionContextProvider, experimentationService);
		// Re-runs whenever the setting changes; reader.store disposes the
		// previous registration/watchers before the body runs again, so
		// toggling the setting cleanly tears down and re-creates everything.
		this._register(autorun(reader => {
			if (this._enableChatSessionContextProvider.read(reader)) {
				reader.store.add(this.register());
				reader.store.add(this.trackBranchChanges());
			}
		}));
	}

	/**
	 * Installs branch watchers on all currently open repositories and on any
	 * repository opened later.
	 *
	 * @returns a disposable that removes all watchers at once. Watchers for
	 * repositories that are closed in the meantime are only released when
	 * this aggregate disposable is disposed (there is no per-repo cleanup
	 * on close here).
	 */
	private trackBranchChanges(): IDisposable {
		const disposables = new DisposableStore();

		// Track branch changes for each repository
		disposables.add(this.gitService.onDidOpenRepository(repo => {
			disposables.add(this.watchBranchChanges(repo));
		}));

		// Watch already opened repositories
		for (const repo of this.gitService.repositories) {
			disposables.add(this.watchBranchChanges(repo));
		}

		return disposables;
	}

	/**
	 * Watches a single repository's HEAD branch name. On a change, records
	 * the change time and drops the cached summary so stale conversation
	 * context is not served across branches.
	 *
	 * NOTE(review): autorun evaluates once immediately, so on startup the
	 * first read (branchName vs. initial undefined `_lastBranchName`) already
	 * counts as a "change" and stamps `_branchChangeTime` — conversations
	 * that began before activation will then be skipped by the resolver.
	 * Confirm this is intended.
	 */
	private watchBranchChanges(repo: RepoContext): IDisposable {
		const headBranchObs = repo.headBranchNameObs;
		return autorun(reader => {
			const branchName = headBranchObs.read(reader);
			if (branchName !== this._lastBranchName) {
				this._lastBranchName = branchName;
				this._branchChangeTime = Date.now();
				// Invalidate the cache when the branch changes
				this._summaryCache = undefined;
				this.logService.trace(`[ChatSessionContextProvider] Branch changed to: ${branchName}`);
			}
		});
	}

	/**
	 * Registers one shared {@link ContextResolver} under two provider
	 * registrations: a catch-all selector targeting NES, and an
	 * 'scminput'-only selector targeting Completions (the SCM commit box).
	 *
	 * The resolver reads/writes this contribution's mutable state via the
	 * getter/setter closures so cache invalidation by the branch watchers is
	 * observed immediately.
	 *
	 * @returns a disposable removing both registrations; registration errors
	 * are logged and swallowed (returns an empty/partial store).
	 */
	private register(): IDisposable {
		const disposables = new DisposableStore();
		try {
			const resolver = new ContextResolver(
				this.logService,
				this.conversationStore,
				() => this._branchChangeTime,
				() => this._summaryCache,
				(cache) => { this._summaryCache = cache; }
			);
			const nesProvider: Copilot.ContextProvider<Copilot.SupportedContextItem> = {
				id: 'chat-session-context-provider',
				selector: '*',
				resolver: resolver
			};
			const scmProvider: Copilot.ContextProvider<Copilot.SupportedContextItem> = {
				id: 'chat-session-context-provider',
				selector: { language: 'scminput' },
				resolver: resolver
			};
			disposables.add(this.languageContextProviderService.registerContextProvider(nesProvider, [ProviderTarget.NES]));
			disposables.add(this.languageContextProviderService.registerContextProvider(scmProvider, [ProviderTarget.Completions]));
		} catch (error) {
			this.logService.error('Error registering chat session context provider:', error);
		}
		return disposables;
	}
}

/**
 * Resolves context items by summarizing the most recent chat conversation
 * with a small language model and exposing the summary as a single
 * high-importance trait.
 *
 * Summaries are cached per (sessionId, turn count): the in-flight promise is
 * stored before it is awaited, so overlapping resolve calls share one
 * request. All failures resolve to an empty item list — this resolver never
 * throws to its caller.
 */
class ContextResolver implements Copilot.ContextResolver<Copilot.SupportedContextItem> {

	constructor(
		private readonly logService: ILogService,
		private readonly conversationStore: IConversationStore,
		// Closures into ChatSessionContextContribution's mutable state so
		// branch-change invalidation is visible without extra wiring.
		private readonly getBranchChangeTime: () => number | undefined,
		private readonly getSummaryCache: () => SummaryCache | undefined,
		private readonly setSummaryCache: (cache: SummaryCache | undefined) => void,
	) { }

	/**
	 * Produces the context items for the current request.
	 *
	 * Returns [] when: there is no conversation; the conversation predates
	 * the last branch change; summarization yields nothing; or any error
	 * occurs (logged). The `token` parameter is intentionally unused — see
	 * the inline note about not cancelling shared summary generation.
	 */
	async resolve(request: Copilot.ResolveRequest, token: CancellationToken): Promise<Copilot.SupportedContextItem[]> {
		try {
			const conversation = this.conversationStore.lastConversation;
			if (!conversation) {
				return [];
			}

			// Check if the conversation started before the branch change
			const branchChangeTime = this.getBranchChangeTime();
			const firstTurnStartTime = this.getFirstTurnStartTime(conversation);
			if (branchChangeTime !== undefined && firstTurnStartTime < branchChangeTime) {
				this.logService.trace(`[ChatSessionContextProvider] Skipping conversation that started before branch change`);
				return [];
			}

			// Check if we have a cached or in-progress summary for this conversation
			const existingCache = this.getSummaryCache();
			const cacheKey = this.getCacheKey(conversation);
			if (existingCache && existingCache.cacheKey === cacheKey) {
				// Await the existing promise (whether it's still running or already resolved)
				const summary = await existingCache.promise;
				if (summary) {
					return this.createTraitFromSummary(summary);
				}
				// A cached undefined result (failed/empty generation) is NOT
				// retried until the cache key changes (new turn) or a branch
				// change clears the cache.
				return [];
			}

			// Start a new summary generation and cache the promise immediately
			// Note: We don't pass the cancellation token to avoid cancelling on subsequent calls
			const summaryPromise = this.generateSummary(conversation);
			this.setSummaryCache({
				cacheKey,
				promise: summaryPromise
			});

			const summary = await summaryPromise;
			if (summary) {
				return this.createTraitFromSummary(summary);
			}
			return [];
		} catch (error) {
			this.logService.error('[ChatSessionContextProvider] Error resolving context:', error);
			return [];
		}
	}

	/**
	 * Start time of the conversation's first turn; an empty conversation is
	 * treated as starting "now" so it is never considered pre-branch-change.
	 */
	private getFirstTurnStartTime(conversation: Conversation): number {
		const turns = conversation.turns;
		if (turns.length === 0) {
			return Date.now();
		}
		return turns[0].startTime;
	}

	// Cache key changes whenever a turn is added, forcing a fresh summary.
	private getCacheKey(conversation: Conversation): string {
		return `${conversation.sessionId}:${conversation.turns.length}`;
	}

	/**
	 * Generates a short summary of the conversation via `vscode.lm`.
	 *
	 * Prefers the 'gpt-4o-mini' Copilot model, falling back to the first
	 * available Copilot model. Streams the response and concatenates the
	 * text parts.
	 *
	 * @returns the trimmed summary text, or undefined when there is no
	 * content to summarize, no model is available, the model returns an
	 * empty result, or any error occurs (logged, never thrown).
	 */
	private async generateSummary(conversation: Conversation): Promise<string | undefined> {
		try {
			// Build a prompt from the conversation
			const conversationContent = this.buildConversationContent(conversation);
			if (!conversationContent) {
				return undefined;
			}

			// Select a mini model (gpt-4o-mini)
			const models = await vscode.lm.selectChatModels({ family: 'gpt-4o-mini', vendor: 'copilot' });
			if (models.length === 0) {
				// Fallback to any available model
				const allModels = await vscode.lm.selectChatModels({ vendor: 'copilot' });
				if (allModels.length === 0) {
					this.logService.trace('[ChatSessionContextProvider] No language models available');
					return undefined;
				}
				models.push(allModels[0]);
			}

			const model = models[0];
			const systemPrompt = `You are a helpful assistant that summarizes conversations. Given a chat conversation between a user and an AI assistant, describe what the user is trying to accomplish in 5 sentences or less. Focus on the user's intent and goals.`;

			// The "system" prompt is folded into a single user message here;
			// the instruction is repeated after the conversation text.
			const messages = [
				LanguageModelChatMessage.User(`${systemPrompt}\n\nConversation:\n${conversationContent}\n\nSummarize what the user is trying to do in 5 sentences or less:`)
			];

			// Note: We intentionally don't pass a cancellation token to avoid cancelling
			// when multiple resolve() calls come in quick succession
			const response = await model.sendRequest(messages, {});

			let summary = '';
			for await (const part of response.stream) {
				if (part instanceof LanguageModelTextPart) {
					summary += part.value;
				}
			}

			// Empty/whitespace-only output is treated as "no summary".
			return summary.trim() || undefined;
		} catch (error) {
			this.logService.error('[ChatSessionContextProvider] Error generating summary:', error);
			return undefined;
		}
	}

	/**
	 * Renders the conversation as "User: …" / "Assistant: …" lines for the
	 * summarization prompt.
	 *
	 * Long assistant responses are middle-truncated to roughly
	 * SINGLE_TURN_MESSAGE_LIMIT characters (keeping head and tail). Then,
	 * walking from the newest line backwards, lines are kept until the total
	 * reaches MAX_TOTAL_MESSAGE_LENGTH — so the newest content wins and the
	 * total may exceed the cap by at most one line. A leading truncation
	 * marker is added when older lines were dropped.
	 *
	 * @returns the joined content, or undefined when there are no turns or
	 * no printable messages.
	 */
	private buildConversationContent(conversation: Conversation): string | undefined {
		const turns = conversation.turns;
		if (turns.length === 0) {
			return undefined;
		}

		const lines: string[] = [];
		for (const turn of turns) {
			// Add user message
			if (turn.request?.message) {
				lines.push(`User: ${turn.request.message}`);
			}

			// Add assistant response
			if (turn.responseMessage?.message) {
				// Truncate long responses
				const truncatedIndicator = '\n... (truncated) ...\n';
				const responseMessage = turn.responseMessage.message;
				const truncatedMessage = responseMessage.length > SINGLE_TURN_MESSAGE_LIMIT + truncatedIndicator.length
					? responseMessage.substring(0, SINGLE_TURN_MESSAGE_LIMIT / 2) + truncatedIndicator + responseMessage.substring(responseMessage.length - SINGLE_TURN_MESSAGE_LIMIT / 2)
					: responseMessage;
				lines.push(`Assistant: ${truncatedMessage}`);
			}
		}

		if (lines.length === 0) {
			return undefined;
		}

		// Make sure the total length is within limits
		let characterCount = 0;
		const linesToKeep = [];
		// Iterate newest-to-oldest so the most recent turns are retained.
		for (let i = lines.length - 1; i >= 0; i--) {
			linesToKeep.unshift(lines[i]);
			characterCount += lines[i].length;
			if (characterCount >= MAX_TOTAL_MESSAGE_LENGTH) {
				break;
			}
		}

		if (linesToKeep.length < lines.length) {
			linesToKeep.unshift('... (truncated) ...');
		}

		return linesToKeep.join('\n\n');
	}

	// Wraps the summary in a single maximum-importance trait.
	private createTraitFromSummary(summary: string): Copilot.Trait[] {
		return [{
			name: 'User\'s current task context',
			value: summary,
			importance: 100
		}];
	}
}