// Source: microsoft/vscode — extensions/copilot/src/extension/chatSessionContext/vscode-node/chatSessionContextProvider.ts
/*---------------------------------------------------------------------------------------------
 *  Copyright (c) Microsoft Corporation. All rights reserved.
 *  Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
import * as vscode from 'vscode';
import { ConfigKey, IConfigurationService } from '../../../platform/configuration/common/configurationService';
import { IGitService, RepoContext } from '../../../platform/git/common/gitService';
import { Copilot } from '../../../platform/inlineCompletions/common/api';
import { ILanguageContextProviderService, ProviderTarget } from '../../../platform/languageContextProvider/common/languageContextProviderService';
import { ILogService } from '../../../platform/log/common/logService';
import { IExperimentationService } from '../../../platform/telemetry/common/nullExperimentationService';
import { CancellationToken } from '../../../util/vs/base/common/cancellation';
import { Disposable, DisposableStore, IDisposable } from '../../../util/vs/base/common/lifecycle';
import { autorun, IObservable } from '../../../util/vs/base/common/observableInternal';
import { LanguageModelChatMessage, LanguageModelTextPart } from '../../../vscodeTypes';
import { IConversationStore } from '../../conversationStore/node/conversationStore';
import { Conversation } from '../../prompt/common/conversation';
/**
 * A cached (possibly still in-flight) conversation summary. The promise is
 * stored immediately when generation starts so that concurrent resolve calls
 * share one generation instead of each starting their own.
 */
interface SummaryCache {
	// Identifies the conversation state the summary was generated from
	// (sessionId + turn count — see ContextResolver.getCacheKey).
	readonly cacheKey: string;
	// Resolves to the summary text, or undefined when generation produced
	// nothing or failed.
	readonly promise: Promise<string | undefined>;
}

// Per-turn cap (in characters) on how much of an assistant response is kept
// verbatim before its middle is elided.
const SINGLE_TURN_MESSAGE_LIMIT = 1_000;
// Approximate cap (in characters) on the total conversation transcript sent
// to the summarization model.
const MAX_TOTAL_MESSAGE_LENGTH = 10_000;
/**
 * Wires up the chat-session context provider behind an experiment-driven
 * configuration flag. While the flag is enabled, it registers context
 * providers with the language-context-provider service and watches git
 * repositories for branch changes so that conversation summaries from a
 * previous branch are not reused as context.
 */
export class ChatSessionContextContribution extends Disposable {

	// Experiment/config-backed flag controlling whether providers are active.
	private readonly _enableChatSessionContextProvider: IObservable<boolean>;
	// Timestamp (Date.now()) of the most recently observed branch switch.
	private _branchChangeTime: number | undefined;
	// Last observed head branch name. NOTE(review): this single field is
	// shared across ALL repositories (see watchBranchChanges), so with
	// multiple repos open, head changes in any repo — or alternating
	// observations between repos — invalidate the cache; confirm intended.
	private _lastBranchName: string | undefined;
	// Cached summary shared with the ContextResolver via closures.
	private _summaryCache: SummaryCache | undefined;

	constructor(
		@IConfigurationService configurationService: IConfigurationService,
		@ILogService private readonly logService: ILogService,
		@IExperimentationService experimentationService: IExperimentationService,
		@IGitService private readonly gitService: IGitService,
		@IConversationStore private readonly conversationStore: IConversationStore,
		@ILanguageContextProviderService private readonly languageContextProviderService: ILanguageContextProviderService,
	) {
		super();
		this._enableChatSessionContextProvider = configurationService.getExperimentBasedConfigObservable(ConfigKey.Advanced.ChatSessionContextProvider, experimentationService);
		// Re-runs whenever the flag flips; registrations added to the
		// reader's store are disposed automatically when the flag turns off.
		this._register(autorun(reader => {
			if (this._enableChatSessionContextProvider.read(reader)) {
				reader.store.add(this.register());
				reader.store.add(this.trackBranchChanges());
			}
		}));
	}

	/**
	 * Subscribes to head-branch changes on all currently open and any
	 * subsequently opened repositories.
	 * @returns a disposable that tears down all subscriptions.
	 */
	private trackBranchChanges(): IDisposable {
		const disposables = new DisposableStore();

		// Track branch changes for each repository
		disposables.add(this.gitService.onDidOpenRepository(repo => {
			disposables.add(this.watchBranchChanges(repo));
		}));

		// Watch already opened repositories
		for (const repo of this.gitService.repositories) {
			disposables.add(this.watchBranchChanges(repo));
		}

		return disposables;
	}

	/**
	 * Observes one repository's head branch; on change, records the change
	 * time and drops the cached summary so stale context is not served.
	 */
	private watchBranchChanges(repo: RepoContext): IDisposable {
		const headBranchObs = repo.headBranchNameObs;
		return autorun(reader => {
			const branchName = headBranchObs.read(reader);
			if (branchName !== this._lastBranchName) {
				this._lastBranchName = branchName;
				this._branchChangeTime = Date.now();
				// Invalidate the cache when the branch changes
				this._summaryCache = undefined;
				this.logService.trace(`[ChatSessionContextProvider] Branch changed to: ${branchName}`);
			}
		});
	}

	/**
	 * Registers one shared ContextResolver under two providers: for NES on
	 * all languages, and for completions in the SCM commit-message input.
	 * Errors are logged rather than thrown so the contribution never breaks
	 * extension activation.
	 * @returns a disposable that unregisters both providers.
	 */
	private register(): IDisposable {
		const disposables = new DisposableStore();
		try {
			// The resolver reads/writes this contribution's state via
			// closures, so branch invalidation here is seen by the resolver.
			const resolver = new ContextResolver(
				this.logService,
				this.conversationStore,
				() => this._branchChangeTime,
				() => this._summaryCache,
				(cache) => { this._summaryCache = cache; }
			);
			const nesProvider: Copilot.ContextProvider<Copilot.SupportedContextItem> = {
				id: 'chat-session-context-provider',
				selector: '*',
				resolver: resolver
			};
			const scmProvider: Copilot.ContextProvider<Copilot.SupportedContextItem> = {
				id: 'chat-session-context-provider',
				selector: { language: 'scminput' },
				resolver: resolver
			};
			disposables.add(this.languageContextProviderService.registerContextProvider(nesProvider, [ProviderTarget.NES]));
			disposables.add(this.languageContextProviderService.registerContextProvider(scmProvider, [ProviderTarget.Completions]));
		} catch (error) {
			this.logService.error('Error registering chat session context provider:', error);
		}
		return disposables;
	}
}
class ContextResolver implements Copilot.ContextResolver<Copilot.SupportedContextItem> {
113
114
constructor(
115
private readonly logService: ILogService,
116
private readonly conversationStore: IConversationStore,
117
private readonly getBranchChangeTime: () => number | undefined,
118
private readonly getSummaryCache: () => SummaryCache | undefined,
119
private readonly setSummaryCache: (cache: SummaryCache | undefined) => void,
120
) { }
121
122
async resolve(request: Copilot.ResolveRequest, token: CancellationToken): Promise<Copilot.SupportedContextItem[]> {
123
try {
124
const conversation = this.conversationStore.lastConversation;
125
if (!conversation) {
126
return [];
127
}
128
129
// Check if the conversation started before the branch change
130
const branchChangeTime = this.getBranchChangeTime();
131
const firstTurnStartTime = this.getFirstTurnStartTime(conversation);
132
if (branchChangeTime !== undefined && firstTurnStartTime < branchChangeTime) {
133
this.logService.trace(`[ChatSessionContextProvider] Skipping conversation that started before branch change`);
134
return [];
135
}
136
137
// Check if we have a cached or in-progress summary for this conversation
138
const existingCache = this.getSummaryCache();
139
const cacheKey = this.getCacheKey(conversation);
140
if (existingCache && existingCache.cacheKey === cacheKey) {
141
// Await the existing promise (whether it's still running or already resolved)
142
const summary = await existingCache.promise;
143
if (summary) {
144
return this.createTraitFromSummary(summary);
145
}
146
return [];
147
}
148
149
// Start a new summary generation and cache the promise immediately
150
// Note: We don't pass the cancellation token to avoid cancelling on subsequent calls
151
const summaryPromise = this.generateSummary(conversation);
152
this.setSummaryCache({
153
cacheKey,
154
promise: summaryPromise
155
});
156
157
const summary = await summaryPromise;
158
if (summary) {
159
return this.createTraitFromSummary(summary);
160
}
161
return [];
162
} catch (error) {
163
this.logService.error('[ChatSessionContextProvider] Error resolving context:', error);
164
return [];
165
}
166
}
167
168
private getFirstTurnStartTime(conversation: Conversation): number {
169
const turns = conversation.turns;
170
if (turns.length === 0) {
171
return Date.now();
172
}
173
return turns[0].startTime;
174
}
175
176
private getCacheKey(conversation: Conversation): string {
177
return `${conversation.sessionId}:${conversation.turns.length}`;
178
}
179
180
private async generateSummary(conversation: Conversation): Promise<string | undefined> {
181
try {
182
// Build a prompt from the conversation
183
const conversationContent = this.buildConversationContent(conversation);
184
if (!conversationContent) {
185
return undefined;
186
}
187
188
// Select a mini model (gpt-4o-mini)
189
const models = await vscode.lm.selectChatModels({ family: 'gpt-4o-mini', vendor: 'copilot' });
190
if (models.length === 0) {
191
// Fallback to any available model
192
const allModels = await vscode.lm.selectChatModels({ vendor: 'copilot' });
193
if (allModels.length === 0) {
194
this.logService.trace('[ChatSessionContextProvider] No language models available');
195
return undefined;
196
}
197
models.push(allModels[0]);
198
}
199
200
const model = models[0];
201
const systemPrompt = `You are a helpful assistant that summarizes conversations. Given a chat conversation between a user and an AI assistant, describe what the user is trying to accomplish in 5 sentences or less. Focus on the user's intent and goals.`;
202
203
const messages = [
204
LanguageModelChatMessage.User(`${systemPrompt}\n\nConversation:\n${conversationContent}\n\nSummarize what the user is trying to do in 5 sentences or less:`)
205
];
206
207
// Note: We intentionally don't pass a cancellation token to avoid cancelling
208
// when multiple resolve() calls come in quick succession
209
const response = await model.sendRequest(messages, {});
210
211
let summary = '';
212
for await (const part of response.stream) {
213
if (part instanceof LanguageModelTextPart) {
214
summary += part.value;
215
}
216
}
217
218
return summary.trim() || undefined;
219
} catch (error) {
220
this.logService.error('[ChatSessionContextProvider] Error generating summary:', error);
221
return undefined;
222
}
223
}
224
225
private buildConversationContent(conversation: Conversation): string | undefined {
226
const turns = conversation.turns;
227
if (turns.length === 0) {
228
return undefined;
229
}
230
231
const lines: string[] = [];
232
for (const turn of turns) {
233
// Add user message
234
if (turn.request?.message) {
235
lines.push(`User: ${turn.request.message}`);
236
}
237
238
// Add assistant response
239
if (turn.responseMessage?.message) {
240
// Truncate long responses
241
const truncatedIndicator = '\n... (truncated) ...\n';
242
const responseMessage = turn.responseMessage.message;
243
const truncatedMessage = responseMessage.length > SINGLE_TURN_MESSAGE_LIMIT + truncatedIndicator.length
244
? responseMessage.substring(0, SINGLE_TURN_MESSAGE_LIMIT / 2) + truncatedIndicator + responseMessage.substring(responseMessage.length - SINGLE_TURN_MESSAGE_LIMIT / 2)
245
: responseMessage;
246
lines.push(`Assistant: ${truncatedMessage}`);
247
}
248
}
249
250
if (lines.length === 0) {
251
return undefined;
252
}
253
254
// Make sure the total length is within limits
255
let characterCount = 0;
256
const linesToKeep = [];
257
for (let i = lines.length - 1; i >= 0; i--) {
258
linesToKeep.unshift(lines[i]);
259
characterCount += lines[i].length;
260
if (characterCount >= MAX_TOTAL_MESSAGE_LENGTH) {
261
break;
262
}
263
}
264
265
if (linesToKeep.length < lines.length) {
266
linesToKeep.unshift('... (truncated) ...');
267
}
268
269
return linesToKeep.join('\n\n');
270
}
271
272
private createTraitFromSummary(summary: string): Copilot.Trait[] {
273
return [{
274
name: 'User\'s current task context',
275
value: summary,
276
importance: 100
277
}];
278
}
279
}
280
281