GitHub Repository: microsoft/vscode
Path: blob/main/extensions/copilot/src/extension/inlineChat2/node/progressMessages.ts
/*---------------------------------------------------------------------------------------------
 *  Copyright (c) Microsoft Corporation. All rights reserved.
 *  Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import { ChatFetchResponseType, ChatLocation } from '../../../platform/chat/common/commonTypes';
import { IEndpointProvider } from '../../../platform/endpoint/common/endpointProvider';
import { IEnvService } from '../../../platform/env/common/envService';
import { ILogService } from '../../../platform/log/common/logService';
import { CancellationToken } from '../../../util/vs/base/common/cancellation';
import { basename } from '../../../util/vs/base/common/resources';
import { IInstantiationService } from '../../../util/vs/platform/instantiation/common/instantiation';
import { IDocumentContext } from '../../prompt/node/documentContext';
import { renderPromptElement } from '../../prompts/node/base/promptRenderer';
import { ContextualProgressMessagePrompt, ContextualProgressMessagePromptProps, ProgressMessageScenario, ProgressMessagesPrompt, ProgressMessagesPromptProps } from './progressMessagesPrompt';

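// How many messages each background fetch requests, and how small the cache may
// shrink before another background fetch is kicked off.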
const MESSAGES_PER_FETCH = 10;
const REFETCH_THRESHOLD = 3;

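// Per-scenario message pool. `fetchInProgress` keeps getNextMessage from triggering
// a second background fetch while one is already running.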
interface MessageCache {
	readonly messages: string[];
	readonly fetchInProgress: boolean;
}

/**
 * Provides catchy progress messages for inline chat operations.
 * Pre-fetches messages and automatically replenishes when running low.
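 *
 * @example
 * // Minimal usage sketch; the real call sites live elsewhere in the extension, and
 * // `progress` stands in for whatever progress reporter the caller holds.
 * const progressMessages = instantiationService.createInstance(InlineChatProgressMessages);
 * progress.report({ message: progressMessages.getNextMessage('edit') });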
 */
export class InlineChatProgressMessages {

	private readonly _caches = new Map<ProgressMessageScenario, MessageCache>();
	private readonly _pendingFetches = new Map<ProgressMessageScenario, Promise<void>>();

	constructor(
		@ILogService private readonly _logService: ILogService,
		@IEndpointProvider private readonly _endpointProvider: IEndpointProvider,
		@IInstantiationService private readonly _instantiationService: IInstantiationService,
		@IEnvService private readonly _envService: IEnvService,
	) {
		// Initialize caches with fallback messages
		this._caches.set('generate', { messages: [...InlineChatProgressMessages._FALLBACK_GENERATE], fetchInProgress: false });
		this._caches.set('edit', { messages: [...InlineChatProgressMessages._FALLBACK_EDIT], fetchInProgress: false });

		this.prewarm();
	}

	private static readonly _FALLBACK_GENERATE: readonly string[] = [
		'Working...',
	];

	private static readonly _FALLBACK_EDIT: readonly string[] = [
		'Working...',
	];

	/**
	 * Gets the next progress message for the given scenario.
	 * Automatically triggers a background fetch when running low on messages.
	 */
	getNextMessage(scenario: ProgressMessageScenario): string {
		const cache = this._caches.get(scenario);
		if (!cache || cache.messages.length === 0) {
			// Should never happen, but use fallback
			const fallbacks = scenario === 'generate'
				? InlineChatProgressMessages._FALLBACK_GENERATE
				: InlineChatProgressMessages._FALLBACK_EDIT;
			return fallbacks[Math.floor(Math.random() * fallbacks.length)];
		}

		// Get a random message and remove it from the cache
		const index = Math.floor(Math.random() * cache.messages.length);
		const message = cache.messages[index];
		const newMessages = [...cache.messages];
		newMessages.splice(index, 1);

		this._caches.set(scenario, { messages: newMessages, fetchInProgress: cache.fetchInProgress });

		// Trigger background fetch if running low
		if (newMessages.length < REFETCH_THRESHOLD && !cache.fetchInProgress) {
			this._triggerBackgroundFetch(scenario);
		}

		return message;
	}

	/**
	 * Gets a contextual progress message based on the user's prompt and document context.
	 * Falls back to generic messages if contextual generation fails or times out.
	 */
	async getContextualMessage(prompt: string, documentContext: IDocumentContext, token: CancellationToken): Promise<string> {
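		// An empty selection means we are generating new code; otherwise we are editing the selection.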
		const scenario: ProgressMessageScenario = documentContext.selection.isEmpty ? 'generate' : 'edit';

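		// Never call the model during simulation runs; use the pre-seeded pool instead.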
		if (this._envService.isSimulation()) {
			return this.getNextMessage(scenario);
		}

		try {
			const endpoint = await this._endpointProvider.getChatEndpoint('copilot-fast');

			const selectedCode = documentContext.selection.isEmpty
				? undefined
				: documentContext.document.getText(documentContext.selection);

			const props: ContextualProgressMessagePromptProps = {
				prompt,
				fileName: basename(documentContext.document.uri),
				uri: documentContext.document.uri,
				languageId: documentContext.document.languageId,
				selectedCode,
			};

			const { messages: promptMessages } = await renderPromptElement(
				this._instantiationService,
				endpoint,
				ContextualProgressMessagePrompt,
				props
			);

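			// Ask the fast endpoint for a single short message tailored to this request;
			// any failure below falls through to the generic message pool.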
			const response = await endpoint.makeChatRequest2({
				debugName: 'contextualProgressMessage',
				messages: promptMessages,
				finishedCb: undefined,
				location: ChatLocation.Editor,
				userInitiatedRequest: false,
				isConversationRequest: false,
			}, token);

			if (response.type === ChatFetchResponseType.Success) {
				const message = this._parseContextualMessage(response.value);
				if (message) {
					this._logService.trace(`[InlineChatProgressMessages] Generated contextual message: ${message}`);
					return message;
				}
			}
		} catch (err) {
			this._logService.trace(`[InlineChatProgressMessages] Contextual message generation failed, using fallback: ${err}`);
		}

		// Fall back to generic message
		return this.getNextMessage(scenario);
	}

	private _parseContextualMessage(responseText: string): string | undefined {
		const trimmed = responseText.trim();
		// Remove any surrounding quotes if present
		const unquoted = trimmed.replace(/^["']|["']$/g, '');
		// Validate the message is reasonable length
		if (unquoted.length > 0 && unquoted.length < 60) {
			return unquoted;
		}
		return undefined;
	}

	/**
	 * Pre-warms the cache by fetching messages for both scenarios.
	 * Can be called during extension activation.
	 */
	prewarm(): void {
		this._triggerBackgroundFetch('generate');
		this._triggerBackgroundFetch('edit');
	}

	private _triggerBackgroundFetch(scenario: ProgressMessageScenario): void {
		if (this._pendingFetches.has(scenario)) {
			return;
		}

		if (this._envService.isSimulation()) {
			return;
		}

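		// Mark the cache as mid-fetch so getNextMessage does not schedule another fetch
		// for the same scenario while this one is in flight.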
		const currentCache = this._caches.get(scenario);
		if (currentCache) {
			this._caches.set(scenario, { messages: currentCache.messages, fetchInProgress: true });
		}

		const fetchPromise = this._fetchMessages(scenario).finally(() => {
			this._pendingFetches.delete(scenario);
			const cache = this._caches.get(scenario);
			if (cache) {
				this._caches.set(scenario, { messages: cache.messages, fetchInProgress: false });
			}
		});

		this._pendingFetches.set(scenario, fetchPromise);
	}

	private async _fetchMessages(scenario: ProgressMessageScenario): Promise<void> {
		try {
			const endpoint = await this._endpointProvider.getChatEndpoint('copilot-fast');

			const props: ProgressMessagesPromptProps = { scenario, count: MESSAGES_PER_FETCH };
			const { messages: promptMessages } = await renderPromptElement(
				this._instantiationService,
				endpoint,
				ProgressMessagesPrompt,
				props
			);

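			// Batch-request a fresh set of short messages. This runs in the background,
			// so no caller cancellation token is threaded through.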
			const response = await endpoint.makeChatRequest2({
				debugName: 'progressMessages',
				messages: promptMessages,
				finishedCb: undefined,
				location: ChatLocation.Editor,
				userInitiatedRequest: false,
				isConversationRequest: false,
			}, CancellationToken.None);

			if (response.type === ChatFetchResponseType.Success) {
				const newMessages = this._parseMessages(response.value);
				if (newMessages.length > 0) {
					const currentCache = this._caches.get(scenario);
					const existingMessages = currentCache?.messages ?? [];
					this._caches.set(scenario, {
						messages: [...existingMessages, ...newMessages],
						fetchInProgress: false
					});
					this._logService.trace(`[InlineChatProgressMessages] Fetched ${newMessages.length} messages for ${scenario}`);
				}
			} else {
				this._logService.warn(`[InlineChatProgressMessages] Failed to fetch messages for ${scenario}: ${response.reason}`);
			}
		} catch (err) {
			this._logService.error(`[InlineChatProgressMessages] Error fetching messages for ${scenario}`, err);
		}
	}

	private _parseMessages(responseText: string): string[] {
		try {
			// Try to extract JSON array from the response
			const trimmed = responseText.trim();
			let jsonStr = trimmed;

			// Handle markdown code blocks
			const jsonMatch = trimmed.match(/```(?:json)?\s*([\s\S]*?)```/);
			if (jsonMatch) {
				jsonStr = jsonMatch[1].trim();
			}

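			// Accept only a well-formed array of strings, dropping empty or overly long entries.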
			const parsed = JSON.parse(jsonStr);
			if (Array.isArray(parsed) && parsed.every(item => typeof item === 'string')) {
				return parsed.filter(msg => msg.length > 0 && msg.length < 50);
			}
		} catch (err) {
			this._logService.error('[InlineChatProgressMessages] Failed to parse response as JSON', err);
		}

		return [];
	}
}