GitHub Repository: microsoft/vscode
Path: blob/main/src/vs/workbench/contrib/chat/browser/chatEditing/chatEditingExplanationModelManager.ts
/*---------------------------------------------------------------------------------------------
 *  Copyright (c) Microsoft Corporation. All rights reserved.
 *  Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import { CancellationToken, CancellationTokenSource } from '../../../../../base/common/cancellation.js';
import { Disposable, IDisposable } from '../../../../../base/common/lifecycle.js';
import { ResourceMap } from '../../../../../base/common/map.js';
import { IObservable, observableValue } from '../../../../../base/common/observable.js';
import { basename } from '../../../../../base/common/resources.js';
import { URI } from '../../../../../base/common/uri.js';
import { ITextModel } from '../../../../../editor/common/model.js';
import { DetailedLineRangeMapping, LineRangeMapping } from '../../../../../editor/common/diff/rangeMapping.js';
import { createDecorator } from '../../../../../platform/instantiation/common/instantiation.js';
import { InstantiationType, registerSingleton } from '../../../../../platform/instantiation/common/extensions.js';
import { ChatMessageRole, ILanguageModelsService } from '../../common/languageModels.js';
import { ExtensionIdentifier } from '../../../../../platform/extensions/common/extensions.js';
import * as nls from '../../../../../nls.js';

/**
 * Simple diff info interface for explanation generation
 */
export interface IExplanationDiffInfo {
	readonly changes: readonly (LineRangeMapping | DetailedLineRangeMapping)[];
	readonly identical: boolean;
	readonly originalModel: ITextModel;
	readonly modifiedModel: ITextModel;
}

/**
 * A single explanation for a change
 */
export interface IChangeExplanation {
	readonly uri: URI;
	readonly startLineNumber: number;
	readonly endLineNumber: number;
	readonly originalText: string;
	readonly modifiedText: string;
	readonly explanation: string;
}

/**
 * Progress state for explanation generation
 */
export type ExplanationProgress = 'idle' | 'loading' | 'complete' | 'error';

/**
 * Explanation state for a single URI
 */
export interface IExplanationState {
	readonly progress: ExplanationProgress;
	readonly explanations: readonly IChangeExplanation[];
	readonly diffInfo: IExplanationDiffInfo;
	readonly chatSessionResource: URI | undefined;
	readonly errorMessage?: string;
}

/**
 * Handle returned when generating explanations
 */
export interface IExplanationGenerationHandle extends IDisposable {
	/**
	 * The URIs being explained
	 */
	readonly uris: readonly URI[];

	/**
	 * Promise that resolves when generation is complete
	 */
	readonly completed: Promise<void>;
}

export const IChatEditingExplanationModelManager = createDecorator<IChatEditingExplanationModelManager>('chatEditingExplanationModelManager');

export interface IChatEditingExplanationModelManager {
	readonly _serviceBrand: undefined;

	/**
	 * Observable map from URI to explanation state.
	 * When a URI has state, explanations are shown. When removed, they are hidden.
	 * UI code can use autorun or derived to react to state changes.
	 */
	readonly state: IObservable<ResourceMap<IExplanationState>>;

	/**
	 * Generates explanations for the given diff infos using a single LLM request.
	 * This allows the model to understand the complete change across files.
	 * Returns a disposable handle for lifecycle management.
	 * The generation can be cancelled by disposing the handle or via the cancellation token.
	 * Disposing the handle also removes the explanations from the state.
	 *
	 * State is updated per-file as explanations are parsed from the response.
	 *
	 * @param diffInfos Array of diff info objects, one per file
	 * @param chatSessionResource Chat session resource for follow-up actions
	 * @param token Cancellation token for external cancellation control
	 * @returns A handle with disposal and completion tracking
	 */
	generateExplanations(diffInfos: readonly IExplanationDiffInfo[], chatSessionResource: URI | undefined, token: CancellationToken): IExplanationGenerationHandle;
}
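
/*
 * Illustrative usage sketch (not part of the upstream file): a hypothetical consumer
 * obtains the manager via dependency injection, starts one generation pass for a set
 * of diffs, and reacts to per-file state updates with `autorun` from
 * '../../../../../base/common/observable.js'. The `ExplanationWidget` class and its
 * rendering hook are assumptions made purely for illustration.
 *
 *	class ExplanationWidget extends Disposable {
 *		constructor(
 *			diffInfos: readonly IExplanationDiffInfo[],
 *			@IChatEditingExplanationModelManager explanationManager: IChatEditingExplanationModelManager,
 *		) {
 *			super();
 *			// Disposing the handle (here, together with the widget) cancels the request
 *			// and removes the explanations from the observable state.
 *			const handle = this._register(explanationManager.generateExplanations(diffInfos, undefined, CancellationToken.None));
 *			this._register(autorun(reader => {
 *				const state = explanationManager.state.read(reader);
 *				for (const uri of handle.uris) {
 *					const uriState = state.get(uri);
 *					if (uriState?.progress === 'complete') {
 *						// render uriState.explanations for this file
 *					}
 *				}
 *			}));
 *		}
 *	}
 */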

/**
 * Gets the text content for a change
 */
function getChangeTexts(change: LineRangeMapping | DetailedLineRangeMapping, diffInfo: IExplanationDiffInfo): { originalText: string; modifiedText: string } {
	const originalLines: string[] = [];
	const modifiedLines: string[] = [];

	// Get original text
	for (let i = change.original.startLineNumber; i < change.original.endLineNumberExclusive; i++) {
		const line = diffInfo.originalModel.getLineContent(i);
		originalLines.push(line);
	}

	// Get modified text
	for (let i = change.modified.startLineNumber; i < change.modified.endLineNumberExclusive; i++) {
		const line = diffInfo.modifiedModel.getLineContent(i);
		modifiedLines.push(line);
	}

	return {
		originalText: originalLines.join('\n'),
		modifiedText: modifiedLines.join('\n')
	};
}
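
// Worked example (illustrative, not in the upstream file): for a change whose
// `original` range has startLineNumber 10 and endLineNumberExclusive 12, the first
// loop above reads lines 10 and 11 only, so `originalText` is those two lines joined
// with '\n'. The same exclusive-end convention is why the prompt construction below
// reports `endLineNumberExclusive - 1` as the last changed line of each modified range.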

export class ChatEditingExplanationModelManager extends Disposable implements IChatEditingExplanationModelManager {
	declare readonly _serviceBrand: undefined;

	private readonly _state = observableValue<ResourceMap<IExplanationState>>(this, new ResourceMap<IExplanationState>());
	readonly state: IObservable<ResourceMap<IExplanationState>> = this._state;

	constructor(
		@ILanguageModelsService private readonly _languageModelsService: ILanguageModelsService,
	) {
		super();
	}

	private _updateUriState(uri: URI, uriState: IExplanationState): void {
		const current = this._state.get();
		const newState = new ResourceMap<IExplanationState>(current);
		newState.set(uri, uriState);
		this._state.set(newState, undefined);
	}

	private _updateUriStatePartial(uri: URI, partial: Partial<IExplanationState>): void {
		const current = this._state.get();
		const existing = current.get(uri);
		if (existing) {
			const newState = new ResourceMap<IExplanationState>(current);
			newState.set(uri, { ...existing, ...partial });
			this._state.set(newState, undefined);
		}
	}

	private _removeUris(uris: readonly URI[]): void {
		const current = this._state.get();
		const newState = new ResourceMap<IExplanationState>(current);
		for (const uri of uris) {
			newState.delete(uri);
		}
		this._state.set(newState, undefined);
	}

	generateExplanations(diffInfos: readonly IExplanationDiffInfo[], chatSessionResource: URI | undefined, token: CancellationToken): IExplanationGenerationHandle {
		const uris = diffInfos.map(d => d.modifiedModel.uri);
		const cts = new CancellationTokenSource(token);

		// Set loading state for all URIs with diffInfo and chatSessionResource
		for (const diffInfo of diffInfos) {
			this._updateUriState(diffInfo.modifiedModel.uri, {
				progress: 'loading',
				explanations: [],
				diffInfo,
				chatSessionResource,
			});
		}

		const completed = this._doGenerateExplanations(diffInfos, cts.token);

		return {
			uris,
			completed,
			dispose: () => {
				cts.dispose(true);
				this._removeUris(uris);
			}
		};
	}

	private async _doGenerateExplanations(diffInfos: readonly IExplanationDiffInfo[], cancellationToken: CancellationToken): Promise<void> {
		// Filter out empty diffs and mark them complete with no explanations
		const nonEmptyDiffs: IExplanationDiffInfo[] = [];
		for (const diffInfo of diffInfos) {
			if (diffInfo.changes.length === 0 || diffInfo.identical) {
				this._updateUriStatePartial(diffInfo.modifiedModel.uri, {
					progress: 'complete',
					explanations: [],
				});
			} else {
				nonEmptyDiffs.push(diffInfo);
			}
		}

		if (nonEmptyDiffs.length === 0) {
			return;
		}

		// Build change data for all files
		interface FileChangeData {
			uri: URI;
			fileName: string;
			changes: {
				startLineNumber: number;
				endLineNumber: number;
				originalText: string;
				modifiedText: string;
			}[];
		}

		const fileChanges: FileChangeData[] = nonEmptyDiffs.map(diffInfo => {
			const uri = diffInfo.modifiedModel.uri;
			const fileName = basename(uri);
			const changes = diffInfo.changes.map(change => {
				const { originalText, modifiedText } = getChangeTexts(change, diffInfo);
				return {
					startLineNumber: change.modified.startLineNumber,
					endLineNumber: change.modified.endLineNumberExclusive - 1,
					originalText,
					modifiedText,
				};
			});
			return { uri, fileName, changes };
		});

		// Total number of changes across all files
		const totalChanges = fileChanges.reduce((sum, f) => sum + f.changes.length, 0);

		try {
			// Select a high-end model for better understanding of all changes together
			let models = await this._languageModelsService.selectLanguageModels({ vendor: 'copilot', family: 'claude-3.5-sonnet' });
			if (!models.length) {
				models = await this._languageModelsService.selectLanguageModels({ vendor: 'copilot', family: 'gpt-4o' });
			}
			if (!models.length) {
				models = await this._languageModelsService.selectLanguageModels({ vendor: 'copilot', family: 'gpt-4' });
			}
			if (!models.length) {
				// Fallback to any available model
				models = await this._languageModelsService.selectLanguageModels({ vendor: 'copilot' });
			}
			if (!models.length) {
				for (const fileData of fileChanges) {
					this._updateUriStatePartial(fileData.uri, {
						progress: 'error',
						explanations: [],
						errorMessage: nls.localize('noModelAvailable', "No language model available"),
					});
				}
				return;
			}

			if (cancellationToken.isCancellationRequested) {
				return;
			}

			// Build a prompt with all changes from all files
			let changeIndex = 0;
			const changesDescription = fileChanges.map(fileData => {
				return fileData.changes.map(data => {
					const desc = `=== CHANGE ${changeIndex} (File: ${fileData.fileName}, Lines ${data.startLineNumber}-${data.endLineNumber}) ===
BEFORE:
${data.originalText || '(empty)'}

AFTER:
${data.modifiedText || '(empty)'}`;
					changeIndex++;
					return desc;
				}).join('\n\n');
			}).join('\n\n');

			const fileCount = fileChanges.length;
			const prompt = `Analyze these ${totalChanges} code changes across ${fileCount} file${fileCount > 1 ? 's' : ''} and provide a brief explanation for each one.
These changes are part of a single coherent modification, so consider how they relate to each other.

${changesDescription}

Respond with a JSON array containing exactly ${totalChanges} objects, one for each change in order.
Each object should have an "explanation" field with a brief sentence (max 15 words) explaining what changed and why.
Be specific about the actual code changes. Return ONLY valid JSON, no markdown.

Example response format:
[{"explanation": "Added null check to prevent crash"}, {"explanation": "Renamed variable for clarity"}]`;

			const response = await this._languageModelsService.sendChatRequest(
				models[0],
				new ExtensionIdentifier('core'),
				[{ role: ChatMessageRole.User, content: [{ type: 'text', value: prompt }] }],
				{},
				cancellationToken
			);

			let responseText = '';
			for await (const part of response.stream) {
				if (cancellationToken.isCancellationRequested) {
					return;
				}
				if (Array.isArray(part)) {
					for (const p of part) {
						if (p.type === 'text') {
							responseText += p.value;
						}
					}
				} else if (part.type === 'text') {
					responseText += part.value;
				}
			}

			await response.result;

			if (cancellationToken.isCancellationRequested) {
				return;
			}

			// Parse the JSON response
			let parsed: { explanation: string }[] = [];
			try {
				// Handle potential markdown wrapping
				let jsonText = responseText.trim();
				if (jsonText.startsWith('```')) {
					jsonText = jsonText.replace(/^```(?:json)?\n?/, '').replace(/\n?```$/, '');
				}
				parsed = JSON.parse(jsonText);
			} catch {
				// JSON parsing failed - will use default messages
			}

			// Map explanations back to files
			let parsedIndex = 0;
			for (const fileData of fileChanges) {
				const explanations: IChangeExplanation[] = [];
				for (const data of fileData.changes) {
					const parsedExplanation = parsed[parsedIndex]?.explanation?.trim() || nls.localize('codeWasModified', "Code was modified.");
					explanations.push({
						uri: fileData.uri,
						startLineNumber: data.startLineNumber,
						endLineNumber: data.endLineNumber,
						originalText: data.originalText,
						modifiedText: data.modifiedText,
						explanation: parsedExplanation,
					});
					parsedIndex++;
				}

				this._updateUriStatePartial(fileData.uri, {
					progress: 'complete',
					explanations,
				});
			}
		} catch (e) {
			if (!cancellationToken.isCancellationRequested) {
				const errorMessage = e instanceof Error ? e.message : nls.localize('explanationFailed', "Failed to generate explanations");
				for (const fileData of fileChanges) {
					this._updateUriStatePartial(fileData.uri, {
						progress: 'error',
						explanations: [],
						errorMessage,
					});
				}
			}
		}
	}
}

registerSingleton(IChatEditingExplanationModelManager, ChatEditingExplanationModelManager, InstantiationType.Delayed);
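
// Illustrative consumption sketch (not part of the upstream file): because the service
// is registered as a delayed singleton above, it is instantiated lazily on first request
// and any workbench component can receive it through constructor injection. The
// `MyContribution` name below is a hypothetical example.
//
//	class MyContribution {
//		constructor(
//			@IChatEditingExplanationModelManager private readonly _explanationManager: IChatEditingExplanationModelManager,
//		) { }
//	}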