Path: blob/main/extensions/copilot/src/extension/prompt/node/intents.ts
/*---------------------------------------------------------------------------------------------
 *  Copyright (c) Microsoft Corporation. All rights reserved.
 *  Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import { MetadataMap, Raw, RenderPromptResult } from '@vscode/prompt-tsx';
import type * as vscode from 'vscode';
import { IResponsePart } from '../../../platform/chat/common/chatMLFetcher';
import { ChatLocation, ChatResponse } from '../../../platform/chat/common/commonTypes';
import { PositionOffsetTransformer } from '../../../platform/editing/common/positionOffsetTransformer';
import { IChatEndpoint } from '../../../platform/networking/common/networking';
import { AsyncIterableObject, AsyncIterableSource } from '../../../util/vs/base/common/async';
import { CancellationToken } from '../../../util/vs/base/common/cancellation';
import { TextEdit } from '../../../vscodeTypes';
import { ISessionTurnStorage, OutcomeAnnotation } from '../../inlineChat/node/promptCraftingTypes';
import { IContributedLinkifierFactory } from '../../linkify/common/linkifyService';
import { StreamPipe, forEachStreamed } from '../../prompts/node/inline/utils/streaming';
import { ContributedToolName } from '../../tools/common/toolNames';
import { ChatVariablesCollection } from '../common/chatVariablesCollection';
import { Conversation, PromptMetadata, Turn } from '../common/conversation';
import { IBuildPromptContext } from '../common/intents';
import { ChatTelemetryBuilder } from './chatParticipantTelemetry';
import { IDocumentContext } from './documentContext';
import { AsyncReader, ClassifiedTextPiece, IStreamingEditsStrategy, IStreamingTextPieceClassifier, StreamingEditsResult, TextPieceKind, streamLines } from './streamingEdits';

export interface IIntentSlashCommandInfo {

	// TODO@jrieken REMOVE, implicit via existence of commandInfo
	readonly hiddenFromUser?: boolean;
	readonly allowsEmptyArgs?: boolean; // True by default
	readonly defaultEnablement?: boolean; // True by default

	readonly toolEquivalent?: ContributedToolName;
}

export interface IIntentInvocationContext {

	/**
	 * The location from which this intent is invoked: panel or inline
	 */
	readonly location: ChatLocation;

	/**
	 * The document context to use
	 */
	readonly documentContext?: IDocumentContext;

	readonly request: vscode.ChatRequest;

	readonly slashCommand?: vscode.ChatCommand;
}

export interface IIntent {

	/**
	 * The id of this intent, without a leading slash.
	 */
	readonly id: string;

	/**
	 * The description of this intent, used for the help command.
	 */
	readonly description: string;

	/**
	 * The locations where this intent can be invoked: panel and/or inline
	 */
	readonly locations: ChatLocation[];

	/**
	 * How this is wired up to the slash command system. *Note* that `undefined` means default wiring is used.
	 */
	readonly commandInfo?: IIntentSlashCommandInfo;

	/**
	 * Whether this intent is listed as a capability in the prompt. Defaults to true.
	 */
	readonly isListedCapability?: boolean;

	/**
	 * Invoked when this intent is selected. Returns an invocation object that will be used to craft the prompt and to
	 * process the response. The passed context must be used for the entire invocation.
	 */
	invoke(invocationContext: IIntentInvocationContext): Promise<IIntentInvocation>;

	/**
	 * Handle a request. Note that when this is defined, `invoke` isn't called anymore and can e.g. return
	 * the `NullIntentInvocation` or throw an error.
	 */
	handleRequest?(
		conversation: Conversation,
		request: vscode.ChatRequest,
		stream: vscode.ChatResponseStream,
		token: CancellationToken,
		documentContext: IDocumentContext | undefined,
		agentName: string,
		location: ChatLocation,
		chatTelemetry: ChatTelemetryBuilder,
		yieldRequested: () => boolean,
	): Promise<vscode.ChatResult>;
}
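
// Example (illustrative sketch, not one of the shipped intents): a minimal `IIntent`
// whose invocation renders an empty prompt. It assumes `ChatLocation.Panel` exists as
// an enum member and that an endpoint was resolved elsewhere; `NullIntentInvocation`
// is declared further down in this file.
export class ExampleEchoIntent implements IIntent {

	readonly id = 'echo';
	readonly description = 'Example intent that renders an empty prompt.';
	readonly locations = [ChatLocation.Panel];

	constructor(private readonly _endpoint: IChatEndpoint) { }

	async invoke(invocationContext: IIntentInvocationContext): Promise<IIntentInvocation> {
		// a real intent would inspect the request/slash command and return a prompt-building invocation
		return new NullIntentInvocation(this, invocationContext.location, this._endpoint);
	}
}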

/**
 * An error type that can be thrown from {@link IIntent.invoke} to signal an
 * ordinary error to the user.
 *
 * Note: this is only treated specially in stests at the moment.
 */
export class IntentError extends Error {
	public readonly errorDetails: vscode.ChatErrorDetails;

	constructor(
		error: string | vscode.ChatErrorDetails,
	) {
		super(typeof error === 'string' ? error : error.message);
		this.errorDetails = typeof error === 'string' ? { message: error } : error;
	}
}

export interface IntentLinkificationOptions {
	readonly disable?: boolean;
	readonly additionaLinkifiers?: readonly IContributedLinkifierFactory[];
}

export const nullRenderPromptResult = (): RenderPromptResult => ({
	hasIgnoredFiles: false,
	messages: [],
	omittedReferences: [],
	references: [],
	tokenCount: 0,
	metadata: promptResultMetadata([]),
});

export const promptResultMetadata = (metadata: PromptMetadata[]): MetadataMap => ({
	get<T extends PromptMetadata>(key: new (...args: any[]) => T): T | undefined {
		return metadata.find(m => m instanceof key) as T | undefined;
	},
	getAll<T extends PromptMetadata>(key: new (...args: any[]) => T): T[] {
		return metadata.filter(m => m instanceof key) as T[];
	}
});

/**
 * Generic marker type for telemetry data that can be passed
 * along in an opaque way.
 */
export class TelemetryData extends PromptMetadata {

	override toString(): string {
		return `[TelemetryData](${super.toString()})`;
	}
}

export interface IBuildPromptResult extends RenderPromptResult {

	telemetryData?: readonly TelemetryData[];
}

export interface IIntentInvocation extends Partial<IResponseProcessor> {

	/**
	 * The intent that was invoked (owns this invocation)
	 */
	readonly intent: IIntent;

	/**
	 * The location for this invocation.
	 */
	readonly location: ChatLocation;

	/**
	 * The endpoint for this invocation.
	 */
	readonly endpoint: IChatEndpoint;

	/**
	 * Tools that should be made available to the invocation. If not
	 * provided, the default {@link IToolsService.getEnabledTools} will be used
	 * with no specific filter.
	 */
	getAvailableTools?(): vscode.LanguageModelToolInformation[] | Promise<vscode.LanguageModelToolInformation[]> | undefined;

	/**
	 * Build the prompt, which consists of a system message and different user messages.
	 */
	buildPrompt(
		context: IBuildPromptContext,
		progress: vscode.Progress<vscode.ChatResponseReferencePart | vscode.ChatResponseProgressPart>,
		token: vscode.CancellationToken
	): Promise<IBuildPromptResult>;

	/**
	 * ONLY: panel
	 *
	 * Called when a request with confirmation data is made, and handles the request.
	 * The PromptCrafter/ResponseProcessor will not be called in this scenario.
	 */
	confirmationHandler?(acceptedConfirmationData: any[] | undefined, rejectedConfirmationData: any[] | undefined, progress: vscode.ChatResponseStream): Promise<void>;

	readonly linkification?: IntentLinkificationOptions;

	readonly codeblocksRepresentEdits?: boolean;

	modifyErrorDetails?(errorDetails: vscode.ChatErrorDetails, response: ChatResponse): vscode.ChatErrorDetails;

	getAdditionalVariables?(context: IBuildPromptContext): ChatVariablesCollection | undefined;
}
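
// Example (illustrative sketch): how the MetadataMap returned by `promptResultMetadata`
// is queried. `ExampleMetadata` is a hypothetical PromptMetadata subclass used only
// for this illustration.
export class ExampleMetadata extends PromptMetadata {
	constructor(readonly label: string) { super(); }
}

export function exampleMetadataLookup(): void {
	const map = promptResultMetadata([new ExampleMetadata('a'), new ExampleMetadata('b')]);
	const first = map.get(ExampleMetadata);		// the first instance of ExampleMetadata ('a')
	const all = map.getAll(ExampleMetadata);	// every instance of ExampleMetadata (both entries)
	console.log(first?.label, all.length);		// 'a', 2
}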

export class NullIntentInvocation implements IIntentInvocation {

	constructor(
		readonly intent: IIntent,
		readonly location: ChatLocation,
		readonly endpoint: IChatEndpoint
	) { }

	async buildPrompt(): Promise<RenderPromptResult> {
		return nullRenderPromptResult();
	}
}

export interface IResponseProcessorContext {
	/**
	 * The chat session id
	 */
	readonly chatSessionId: string;

	/**
	 * The currently running turn
	 */
	readonly turn: Turn;

	/**
	 * The messages that have been sent with the LLM request
	 */
	readonly messages: readonly Raw.ChatMessage[];

	/**
	 * Record annotations that occurred when processing the LLM reply.
	 */
	addAnnotations(annotations: OutcomeAnnotation[]): void;

	/**
	 * Store in inline chat session storage.
	 * ONLY: inline
	 */
	storeInInlineSession(store: ISessionTurnStorage): void;
}

export interface IResponseProcessor {
	/**
	 * Process a response as it streams in from the LLM.
	 *
	 * Anything reported to the output stream will be shown to the user in the UI.
	 * This allows processing the response as it streams in and selectively reporting it to the user.
	 *
	 * The LLM request will be cancelled when returning early (before the input stream finishes).
	 *
	 * @param context Context that gives access to more information about the request and allows storing information generated during response processing
	 * @param inputStream The stream containing the LLM response
	 * @param outputStream The stream used to report the processed response to the user
	 * @param token A cancellation token
	 */
	processResponse(context: IResponseProcessorContext, inputStream: AsyncIterable<IResponsePart>, outputStream: vscode.ChatResponseStream, token: CancellationToken): Promise<vscode.ChatResult | void>;
}

export class ReplyInterpreterMetaData extends PromptMetadata {
	constructor(public readonly replyInterpreter: ReplyInterpreter) {
		super();
	}
}

export interface ReplyInterpreter {
	processResponse(context: IResponseProcessorContext, inputStream: AsyncIterable<IResponsePart>, outputStream: vscode.ChatResponseStream, token: CancellationToken): Promise<void>;
}

export class StreamingMarkdownReplyInterpreter implements ReplyInterpreter {
	async processResponse(context: IResponseProcessorContext, inputStream: AsyncIterable<IResponsePart>, outputStream: vscode.ChatResponseStream, token: CancellationToken): Promise<void> {
		for await (const part of inputStream) {
			outputStream.markdown(part.delta.text);
		}
	}
}

export class NoopReplyInterpreter implements ReplyInterpreter {
	async processResponse(): Promise<void> {
		return undefined;
	}
}

export function applyEdits(text: string, edits: TextEdit[]): string {
	const transformer = new PositionOffsetTransformer(text);
	const offsetEdits = edits.map(e => {
		const offsetRange = transformer.toOffsetRange(e.range);
		return ({
			startOffset: offsetRange.start,
			endOffset: offsetRange.endExclusive,
			text: e.newText
		});
	});

	// sort is stable: does not change the order of edits that start at the same offset
	offsetEdits.sort((a, b) => a.startOffset - b.startOffset || a.endOffset - b.endOffset);

	for (let i = offsetEdits.length - 1; i >= 0; i--) {
		const edit = offsetEdits[i];
		text = text.substring(0, edit.startOffset) + edit.text + text.substring(edit.endOffset);
	}

	return text;
}
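
// Example (illustrative sketch): the applyEdits contract on a one-line document.
// `makeRange` is a hypothetical helper standing in for a vscode.Range constructor
// (Range is not imported in this file); the edit replaces characters 6..11 ("world").
declare function makeRange(startLine: number, startCharacter: number, endLine: number, endCharacter: number): vscode.Range;

export function applyEditsExample(): string {
	return applyEdits('hello world', [
		new TextEdit(makeRange(0, 6, 0, 11), 'there'),
	]);	// returns 'hello there'
}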

export type LeadingMarkdownStreaming = StreamPipe<string>;
export const LeadingMarkdownStreaming = {
	Mute: StreamPipe.discard<string>(),
	Emit: StreamPipe.identity<string>(),
};

export const enum EarlyStopping {
	None,
	StopAfterFirstCodeBlock,
}

export class StreamingEditsController {

	private readonly _responseStream = new AsyncIterableSource<string>();
	private _lastLength: number = 0;
	private _leftFirstCodeBlock = false;
	private _streamingPromise: Promise<StreamingEditsResult>;

	constructor(
		private readonly _outputStream: vscode.ChatResponseStream,
		private readonly _leadingMarkdownStreamPipe: StreamPipe<string>,
		private readonly _earlyStopping: EarlyStopping,
		textPieceClassifier: IStreamingTextPieceClassifier,
		streamingEditsStrategy: IStreamingEditsStrategy,
	) {
		const textPieceStream = textPieceClassifier(this._responseStream.asyncIterable);
		this._streamingPromise = this._process(textPieceStream, streamingEditsStrategy);
	}

	private async _process(textPieceStream: AsyncIterableObject<ClassifiedTextPiece>, streamingEditsStrategy: IStreamingEditsStrategy): Promise<StreamingEditsResult> {
		const leadingMarkdown = new AsyncIterableSource<string>();

		const processedMarkdown = this._leadingMarkdownStreamPipe(leadingMarkdown.asyncIterable);
		forEachStreamed(processedMarkdown, item => this._outputStream.markdown(item));

		const firstCodeBlockText = new AsyncIterableSource<string>();
		const firstCodeBlockLines = streamLines(firstCodeBlockText.asyncIterable);
		const streamingEditsPromise = streamingEditsStrategy.processStream(firstCodeBlockLines);

		const textPieceStreamWithoutDelimiters = textPieceStream.filter(piece => piece.kind !== TextPieceKind.Delimiter);
		const reader = new AsyncReader(textPieceStreamWithoutDelimiters[Symbol.asyncIterator]());

		// Read all the markdown pieces until the first code block
		await reader.readWhile(
			piece => piece.kind === TextPieceKind.OutsideCodeBlock,
			piece => leadingMarkdown.emitOne(piece.value)
		);
		leadingMarkdown.resolve();

		// Read the first code block
		await reader.readWhile(
			piece => piece.kind === TextPieceKind.InsideCodeBlock,
			piece => firstCodeBlockText.emitOne(piece.value)
		);

		this._leftFirstCodeBlock = true;

		// Finish reading the rest of the text
		await reader.consumeToEnd();

		firstCodeBlockText.resolve();

		return streamingEditsPromise;
	}

	public update(newText: string): { shouldFinish: boolean } {
		if (this._earlyStopping === EarlyStopping.StopAfterFirstCodeBlock && this._leftFirstCodeBlock) {
			// stop was requested!
			return { shouldFinish: true };
		}

		const chunk = newText.slice(this._lastLength);
		this._lastLength = newText.length;
		this._responseStream.emitOne(chunk);
		return { shouldFinish: false };
	}

	public async finish(): Promise<StreamingEditsResult> {
		this._responseStream.resolve();
		return await this._streamingPromise;
	}
}
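
// Example (illustrative sketch): a driving loop a response processor could use with the
// StreamingEditsController above. It assumes `cumulativeText` yields the accumulated LLM
// response text so far (update() expects the full text, not a delta) and that a
// classifier/strategy pair was created via ./streamingEdits.
export async function driveStreamingEditsExample(
	outputStream: vscode.ChatResponseStream,
	textPieceClassifier: IStreamingTextPieceClassifier,
	streamingEditsStrategy: IStreamingEditsStrategy,
	cumulativeText: AsyncIterable<string>,
): Promise<StreamingEditsResult> {
	const controller = new StreamingEditsController(
		outputStream,
		LeadingMarkdownStreaming.Emit,			// surface the markdown that precedes the first code block
		EarlyStopping.StopAfterFirstCodeBlock,	// allow stopping once the first code block has been read
		textPieceClassifier,
		streamingEditsStrategy,
	);
	for await (const text of cumulativeText) {
		if (controller.update(text).shouldFinish) {
			break;	// the first code block has been fully consumed; the request can be cancelled
		}
	}
	return controller.finish();
}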