Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
microsoft
GitHub Repository: microsoft/vscode
Path: blob/main/extensions/copilot/src/extension/prompt/node/intents.ts
13399 views
1
/*---------------------------------------------------------------------------------------------
2
* Copyright (c) Microsoft Corporation. All rights reserved.
3
* Licensed under the MIT License. See License.txt in the project root for license information.
4
*--------------------------------------------------------------------------------------------*/
5
6
import { MetadataMap, Raw, RenderPromptResult } from '@vscode/prompt-tsx';
7
import type * as vscode from 'vscode';
8
import { IResponsePart } from '../../../platform/chat/common/chatMLFetcher';
9
import { ChatLocation, ChatResponse } from '../../../platform/chat/common/commonTypes';
10
import { PositionOffsetTransformer } from '../../../platform/editing/common/positionOffsetTransformer';
11
import { IChatEndpoint } from '../../../platform/networking/common/networking';
12
import { AsyncIterableObject, AsyncIterableSource } from '../../../util/vs/base/common/async';
13
import { CancellationToken } from '../../../util/vs/base/common/cancellation';
14
import { TextEdit } from '../../../vscodeTypes';
15
import { ISessionTurnStorage, OutcomeAnnotation } from '../../inlineChat/node/promptCraftingTypes';
16
import { IContributedLinkifierFactory } from '../../linkify/common/linkifyService';
17
import { StreamPipe, forEachStreamed } from '../../prompts/node/inline/utils/streaming';
18
import { ContributedToolName } from '../../tools/common/toolNames';
19
import { ChatVariablesCollection } from '../common/chatVariablesCollection';
20
import { Conversation, PromptMetadata, Turn } from '../common/conversation';
21
import { IBuildPromptContext } from '../common/intents';
22
import { ChatTelemetryBuilder } from './chatParticipantTelemetry';
23
import { IDocumentContext } from './documentContext';
24
import { AsyncReader, ClassifiedTextPiece, IStreamingEditsStrategy, IStreamingTextPieceClassifier, StreamingEditsResult, TextPieceKind, streamLines } from './streamingEdits';
25
26
/**
 * Describes how an intent is wired up to the chat slash-command system.
 */
export interface IIntentSlashCommandInfo {

	// TODO@jrieken REMOVE, implicit via existence of commandInfo
	/** When true, the slash command is not offered to the user. */
	readonly hiddenFromUser?: boolean;
	/** Whether the command may be invoked without any arguments. */
	readonly allowsEmptyArgs?: boolean; // True by default
	/** Whether the command is enabled by default. */
	readonly defaultEnablement?: boolean; // True by default

	/** The tool that corresponds to this slash command, if any. */
	readonly toolEquivalent?: ContributedToolName;
}
35
36
/**
 * The context with which an {@link IIntent} is invoked.
 */
export interface IIntentInvocationContext {

	/**
	 * The location from which this invocation happens: panel or inline.
	 */
	readonly location: ChatLocation;

	/**
	 * The document context to use, if the invocation is tied to a document.
	 */
	readonly documentContext?: IDocumentContext;

	/** The originating chat request. */
	readonly request: vscode.ChatRequest;

	/** The slash command that triggered this invocation, if any. */
	readonly slashCommand?: vscode.ChatCommand;
}
52
53
/**
 * An intent describes a chat capability (e.g. a slash command) that can build
 * a prompt and process the model's response.
 */
export interface IIntent {

	/**
	 * The id of this intent, without a leading slash.
	 */
	readonly id: string;


	/**
	 * The description of this intent, used for the help command.
	 */
	readonly description: string;

	/**
	 * The locations where this intent can be invoked: panel and/or inline.
	 */
	readonly locations: ChatLocation[];

	/**
	 * How this is wired up to the slash command system. *Note* that `undefined` means default wiring is used.
	 */
	readonly commandInfo?: IIntentSlashCommandInfo;

	/**
	 * Whether this intent is listed as a capability in the prompt. Defaults to true.
	 */
	readonly isListedCapability?: boolean;

	/**
	 * This intent is invoked; return an invocation object that will be used to craft the prompt and to process the
	 * response. The passed context must be used for the entire invocation.
	 */
	invoke(invocationContext: IIntentInvocationContext): Promise<IIntentInvocation>;

	/**
	 * Handle a request. Note that when defined `invoke` isn't called anymore, e.g. return
	 * the `NullIntentInvocation` or throw an error.
	 *
	 * @param conversation The conversation this request belongs to.
	 * @param request The chat request being handled.
	 * @param stream The stream to report the response to the user.
	 * @param token A cancellation token for the request.
	 * @param documentContext The document context, when invoked against a document.
	 * @param agentName The name of the chat agent handling the request.
	 * @param location Where the request originates: panel or inline.
	 * @param chatTelemetry Builder for telemetry about this request.
	 * @param yieldRequested Returns true when the handler should yield early.
	 */
	handleRequest?(
		conversation: Conversation,
		request: vscode.ChatRequest,
		stream: vscode.ChatResponseStream,
		token: CancellationToken,
		documentContext: IDocumentContext | undefined,
		agentName: string,
		location: ChatLocation,
		chatTelemetry: ChatTelemetryBuilder,
		yieldRequested: () => boolean,
	): Promise<vscode.ChatResult>;
}
104
105
106
/**
107
* An error type that can be thrown from {@link IIntent.invoke} to signal an
108
* ordinary error to the user.
109
*
110
* note: this is only treated specially in stests at the moment
111
*/
112
export class IntentError extends Error {
113
public readonly errorDetails: vscode.ChatErrorDetails;
114
115
constructor(
116
error: string | vscode.ChatErrorDetails,
117
) {
118
super(typeof error === 'string' ? error : error.message);
119
this.errorDetails = typeof error === 'string' ? { message: error } : error;
120
}
121
}
122
123
/**
 * Controls how (and whether) links are detected in an intent's response.
 */
export interface IntentLinkificationOptions {
	/** When true, linkification is turned off entirely. */
	readonly disable?: boolean;
	// NOTE(review): property name is misspelled ("additionaLinkifiers", missing an "l")
	// but is kept as-is since renaming would break existing callers.
	readonly additionaLinkifiers?: readonly IContributedLinkifierFactory[];
}
127
128
129
export const nullRenderPromptResult = (): RenderPromptResult => ({
130
hasIgnoredFiles: false,
131
messages: [],
132
omittedReferences: [],
133
references: [],
134
tokenCount: 0,
135
metadata: promptResultMetadata([]),
136
});
137
138
export const promptResultMetadata = (metadata: PromptMetadata[]): MetadataMap => ({
139
get<T extends PromptMetadata>(key: new (...args: any[]) => T): T | undefined {
140
return metadata.find(m => m instanceof key) as T | undefined;
141
},
142
getAll<T extends PromptMetadata>(key: new (...args: any[]) => T): T[] {
143
return metadata.filter(m => m instanceof key) as T[];
144
}
145
});
146
147
/**
 * Generic marker type of telemetry data that can be passed
 * along in an opaque way
 */
export class TelemetryData extends PromptMetadata {

	/** Renders this entry with a `[TelemetryData]` prefix for logging. */
	override toString(): string {
		return '[TelemetryData](' + super.toString() + ')';
	}
}
157
158
/**
 * The result of building a prompt, optionally carrying opaque telemetry
 * data collected during prompt construction.
 */
export interface IBuildPromptResult extends RenderPromptResult {

	/** Telemetry entries gathered while the prompt was being built. */
	telemetryData?: readonly TelemetryData[];
}
162
163
164
/**
 * A single invocation of an {@link IIntent}: builds the prompt and may
 * participate in processing the response (via the optional
 * {@link IResponseProcessor} members).
 */
export interface IIntentInvocation extends Partial<IResponseProcessor> {

	/**
	 * The intent that was invoked (owns this invocation)
	 */
	readonly intent: IIntent;

	/**
	 * The location for this invocation.
	 */
	readonly location: ChatLocation;

	/**
	 * The endpoint for this invocation.
	 */
	readonly endpoint: IChatEndpoint;

	/**
	 * Tools that should be made available to the invocation. If not
	 * provided, the default {@link IToolsService.getEnabledTools} will be used
	 * with no specific filter.
	 */
	getAvailableTools?(): vscode.LanguageModelToolInformation[] | Promise<vscode.LanguageModelToolInformation[]> | undefined;

	/**
	 * Build the prompt which is a system and different user messages.
	 */
	buildPrompt(
		context: IBuildPromptContext,
		progress: vscode.Progress<vscode.ChatResponseReferencePart | vscode.ChatResponseProgressPart>,
		token: vscode.CancellationToken
	): Promise<IBuildPromptResult>;

	/**
	 * ONLY: panel
	 *
	 * Called when a request with confirmation data is made, and handles the request. The PromptCrafter/ResponseProcessor will not be called in this scenario.
	 */
	confirmationHandler?(acceptedConfirmationData: any[] | undefined, rejectedConfirmationData: any[] | undefined, progress: vscode.ChatResponseStream): Promise<void>;

	/** Options controlling link detection in the response, if any. */
	readonly linkification?: IntentLinkificationOptions;

	/** When true, code blocks in the response are treated as edits. */
	readonly codeblocksRepresentEdits?: boolean;

	/** Allows adjusting error details before they are shown to the user. */
	modifyErrorDetails?(errorDetails: vscode.ChatErrorDetails, response: ChatResponse): vscode.ChatErrorDetails;

	/** Extra chat variables to include when building the prompt, if any. */
	getAdditionalVariables?(context: IBuildPromptContext): ChatVariablesCollection | undefined;
}
212
213
export class NullIntentInvocation implements IIntentInvocation {
214
215
constructor(
216
readonly intent: IIntent,
217
readonly location: ChatLocation,
218
readonly endpoint: IChatEndpoint
219
) { }
220
221
async buildPrompt(): Promise<RenderPromptResult> {
222
return nullRenderPromptResult();
223
}
224
}
225
226
/**
 * Context available while processing an LLM response, giving access to the
 * current turn and allowing extra information to be recorded.
 */
export interface IResponseProcessorContext {
	/**
	 * The chat session id
	 */
	readonly chatSessionId: string;

	/**
	 * The current running turn
	 */
	readonly turn: Turn;

	/**
	 * The messages that have been sent with the LLM request
	 */
	readonly messages: readonly Raw.ChatMessage[];

	/**
	 * Record annotations that occurred when processing the LLM reply.
	 */
	addAnnotations(annotations: OutcomeAnnotation[]): void;

	/**
	 * Store in inline chat session storage.
	 * ONLY: inline
	 */
	storeInInlineSession(store: ISessionTurnStorage): void;
}
253
254
/**
 * Transforms the raw LLM response stream into what the user sees.
 */
export interface IResponseProcessor {
	/**
	 * Process a response as it streams in from the LLM.
	 *
	 * Anything reported to the progress object will be shown to the user in the UI.
	 * This allows processing the response as it streams in and selectively reporting it to the user.
	 *
	 * The LLM request will be cancelled when returning early (before the input stream finishes).
	 *
	 * @param context Context allowing to get more information about the request or to store more information generated during response processing
	 * @param inputStream The stream containing the LLM response
	 * @param outputStream The stream to report the processed response to the user
	 * @param token A cancellation token
	 */
	processResponse(context: IResponseProcessorContext, inputStream: AsyncIterable<IResponsePart>, outputStream: vscode.ChatResponseStream, token: CancellationToken): Promise<vscode.ChatResult | void>;
}
270
271
/**
 * Prompt metadata that carries the {@link ReplyInterpreter} to use for the
 * response.
 */
export class ReplyInterpreterMetaData extends PromptMetadata {

	public readonly replyInterpreter: ReplyInterpreter;

	constructor(replyInterpreter: ReplyInterpreter) {
		super();
		this.replyInterpreter = replyInterpreter;
	}
}
276
277
/**
 * Like {@link IResponseProcessor} but never produces a chat result of its own.
 */
export interface ReplyInterpreter {
	processResponse(context: IResponseProcessorContext, inputStream: AsyncIterable<IResponsePart>, outputStream: vscode.ChatResponseStream, token: CancellationToken): Promise<void>;
}
280
281
/**
 * A {@link ReplyInterpreter} that forwards the raw LLM response to the user
 * as markdown, chunk by chunk, without any post-processing.
 */
export class StreamingMarkdownReplyInterpreter implements ReplyInterpreter {

	async processResponse(_context: IResponseProcessorContext, inputStream: AsyncIterable<IResponsePart>, outputStream: vscode.ChatResponseStream, _token: CancellationToken): Promise<void> {
		for await (const { delta } of inputStream) {
			outputStream.markdown(delta.text);
		}
	}
}
288
289
/**
 * A {@link ReplyInterpreter} that discards the response entirely and shows
 * the user nothing.
 */
export class NoopReplyInterpreter implements ReplyInterpreter {

	processResponse(): Promise<void> {
		// Intentionally does nothing with the stream.
		return Promise.resolve(undefined);
	}
}
294
295
export function applyEdits(text: string, edits: TextEdit[]): string {
296
const transformer = new PositionOffsetTransformer(text);
297
const offsetEdits = edits.map(e => {
298
const offsetRange = transformer.toOffsetRange(e.range);
299
return ({
300
startOffset: offsetRange.start,
301
endOffset: offsetRange.endExclusive,
302
text: e.newText
303
});
304
});
305
306
// sort is stable: does not change the order of edits that start at the same offset
307
offsetEdits.sort((a, b) => a.startOffset - b.startOffset || a.endOffset - b.endOffset);
308
309
for (let i = offsetEdits.length - 1; i >= 0; i--) {
310
const edit = offsetEdits[i];
311
text = text.substring(0, edit.startOffset) + edit.text + text.substring(edit.endOffset);
312
}
313
314
return text;
315
}
316
317
/**
 * How markdown appearing before the first code block is streamed to the user.
 */
export type LeadingMarkdownStreaming = StreamPipe<string>;
export const LeadingMarkdownStreaming = {
	/** Drop the leading markdown entirely. */
	Mute: StreamPipe.discard<string>(),
	/** Pass the leading markdown through unchanged. */
	Emit: StreamPipe.identity<string>(),
};
322
323
/**
 * Whether response streaming may be cut short early.
 */
export const enum EarlyStopping {
	/** Consume the whole response. */
	None,
	/** Allow stopping once the first code block has been fully read. */
	StopAfterFirstCodeBlock,
}
327
328
/**
 * Drives streaming edits from a growing LLM response: text fed in via
 * {@link update} is classified into pieces, the markdown before the first
 * code block is piped to the output stream, and the first code block's lines
 * are handed to the streaming-edits strategy. Any text after the first code
 * block is consumed and discarded.
 */
export class StreamingEditsController {

	// Source fed by update() with response deltas; resolved by finish().
	private readonly _responseStream = new AsyncIterableSource<string>();
	// Length of the cumulative response text seen so far (update() receives
	// the full text each time and slices off the new suffix).
	private _lastLength: number = 0;
	// Set once the first code block has been fully read in _process().
	private _leftFirstCodeBlock = false;
	private _streamingPromise: Promise<StreamingEditsResult>;

	constructor(
		private readonly _outputStream: vscode.ChatResponseStream,
		private readonly _leadingMarkdownStreamPipe: StreamPipe<string>,
		private readonly _earlyStopping: EarlyStopping,
		textPieceClassifier: IStreamingTextPieceClassifier,
		streamingEditsStrategy: IStreamingEditsStrategy,
	) {
		// Start processing immediately; _process runs concurrently with update() calls.
		const textPieceStream = textPieceClassifier(this._responseStream.asyncIterable);
		this._streamingPromise = this._process(textPieceStream, streamingEditsStrategy);
	}

	// Splits the classified piece stream into (1) leading markdown, which is
	// piped to the output stream, and (2) the first code block, which is
	// line-streamed into the edits strategy. Returns the strategy's result.
	private async _process(textPieceStream: AsyncIterableObject<ClassifiedTextPiece>, streamingEditsStrategy: IStreamingEditsStrategy): Promise<StreamingEditsResult> {
		const leadingMarkdown = new AsyncIterableSource<string>();

		const processedMarkdown = this._leadingMarkdownStreamPipe(leadingMarkdown.asyncIterable);
		// NOTE(review): the promise returned by forEachStreamed is not awaited,
		// so markdown may still be flushing when _process resolves — confirm intended.
		forEachStreamed(processedMarkdown, item => this._outputStream.markdown(item));

		const firstCodeBlockText = new AsyncIterableSource<string>();
		const firstCodeBlockLines = streamLines(firstCodeBlockText.asyncIterable);
		// The strategy starts consuming lines concurrently while we feed them below.
		const streamingEditsPromise = streamingEditsStrategy.processStream(firstCodeBlockLines);

		// Code-fence delimiters are dropped; only content pieces are handled.
		const textPieceStreamWithoutDelimiters = textPieceStream.filter(piece => piece.kind !== TextPieceKind.Delimiter);
		const reader = new AsyncReader(textPieceStreamWithoutDelimiters[Symbol.asyncIterator]());

		// Read all the markdown pieces until the first code block
		await reader.readWhile(
			piece => piece.kind === TextPieceKind.OutsideCodeBlock,
			piece => leadingMarkdown.emitOne(piece.value)
		);
		leadingMarkdown.resolve();

		// Read the first code block
		await reader.readWhile(
			piece => piece.kind === TextPieceKind.InsideCodeBlock,
			piece => firstCodeBlockText.emitOne(piece.value)
		);

		// From here on, update() may report shouldFinish when early stopping is on.
		this._leftFirstCodeBlock = true;

		// Finish reading the rest of the text
		await reader.consumeToEnd();

		// NOTE(review): resolved only after the remainder has been consumed,
		// so the strategy sees end-of-lines after the full response is read.
		firstCodeBlockText.resolve();

		return streamingEditsPromise;
	}

	/**
	 * Feeds the cumulative response text seen so far. Only the new suffix
	 * (relative to the previous call) is emitted into the pipeline.
	 *
	 * @returns `shouldFinish: true` once early stopping is enabled and the
	 * first code block has been fully read.
	 */
	public update(newText: string): { shouldFinish: boolean } {
		if (this._earlyStopping === EarlyStopping.StopAfterFirstCodeBlock && this._leftFirstCodeBlock) {
			// stop was requested!
			return { shouldFinish: true };
		}

		const chunk = newText.slice(this._lastLength);
		this._lastLength = newText.length;
		this._responseStream.emitOne(chunk);
		return { shouldFinish: false };
	}

	/**
	 * Signals end of input and waits for the streaming-edits result.
	 */
	public async finish(): Promise<StreamingEditsResult> {
		this._responseStream.resolve();
		return await this._streamingPromise;
	}
}
399
400