Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
microsoft
GitHub Repository: microsoft/vscode
Path: blob/main/extensions/copilot/src/extension/prompts/node/panel/conversationHistory.tsx
13405 views
1
/*---------------------------------------------------------------------------------------------
2
* Copyright (c) Microsoft Corporation. All rights reserved.
3
* Licensed under the MIT License. See License.txt in the project root for license information.
4
*--------------------------------------------------------------------------------------------*/
5
6
import { AssistantMessage, BasePromptElementProps, Chunk, PrioritizedList, PromptElement, PromptPiece, PromptSizing, TokenLimit, UserMessage } from '@vscode/prompt-tsx';
7
import { modelPrefersInstructionsAfterHistory } from '../../../../platform/endpoint/common/chatModelCapabilities';
8
import { URI } from '../../../../util/vs/base/common/uri';
9
import { Location } from '../../../../vscodeTypes';
10
import { ChatVariablesCollection, PromptVariable } from '../../../prompt/common/chatVariablesCollection';
11
import { IResultMetadata, Turn, TurnStatus } from '../../../prompt/common/conversation';
12
import { IBuildPromptContext } from '../../../prompt/common/intents';
13
import { AgentUserMessageInHistory } from '../agent/agentConversationHistory';
14
import { renderedMessageToTsxChildren } from '../agent/agentPrompt';
15
import { InstructionMessage } from '../base/instructionMessage';
16
import { IPromptEndpoint } from '../base/promptRenderer';
17
import { ChatVariablesAndQuery } from './chatVariables';
18
import { ChatToolCalls } from './toolCalling';
19
20
interface ConversationHistoryProps extends BasePromptElementProps {
	/** Prior conversation turns to render, oldest first. */
	history: readonly Turn[];
	/** Priority given to the PrioritizedList that wraps the rendered history. */
	priority: number;
	/**
	 * Signal that is used to roll up the history into a single message, only requests
	 * are considered (and historical responses are assumed to be source code).
	 */
	inline?: boolean;
	/** Variables attached to the current turn; historical variables duplicating these are dropped. */
	currentTurnVars?: ChatVariablesCollection;
	/** When true, prompt variables from historical turns are omitted entirely. */
	omitPromptVariables?: boolean;
}
31
32
/**
 * Wrapper for instructions specific to any given model. It must contain an
 * {@link InstructionMessage} child, and depending on the model family it
 * renders the conversation history either before or after that message.
 *
 * You should use `passPriority` with this: https://github.com/microsoft/vscode-prompt-tsx?tab=readme-ov-file#passing-priority
 *
 * @example
 *
 * <HistoryWithInstructions passPriority priority={700} history={history}>
 *   <InstructionMessage>Do the thing</InstructionMessage>
 * </HistoryWithInstructions>
 */
export class HistoryWithInstructions extends PromptElement<Omit<ConversationHistoryProps, 'priority'> & { historyPriority: number }> {
	constructor(
		props: Omit<ConversationHistoryProps, 'priority'> & { historyPriority: number },
		@IPromptEndpoint private readonly promptEndpoint: IPromptEndpoint
	) {
		super(props);
	}

	override render(_state: void, sizing: PromptSizing): PromptPiece {
		const { children, ...restProps } = this.props;
		const hasInstructionChild = children?.some(child => typeof child === 'object' && child.ctor === InstructionMessage);
		if (!hasInstructionChild) {
			// This is a sanity check, and could be removed if we eventually want to
			// have wrappers around InstructionMessages, but for now this is useful.
			throw new Error(`HistoryWithInstructions must have an InstructionMessage child`);
		}

		// Some model families work better with history preceding the instructions.
		const historyFirst = modelPrefersInstructionsAfterHistory(this.promptEndpoint.family);
		const historyElement = <ConversationHistory {...restProps} passPriority={false} priority={this.props.historyPriority} />;
		return <>
			{historyFirst ? historyElement : undefined}
			{...children}
			{historyFirst ? undefined : historyElement}
		</>;
	}
}
69
70
/**
 * @deprecated use `HistoryWithInstructions` instead
 */
export class ConversationHistory extends PromptElement<ConversationHistoryProps> {
	override render(_state: void, _sizing: PromptSizing): PromptPiece<any, any> | undefined {
		// exclude turns from the history that errored due to prompt filtration
		let turns = this.props.history.filter(t => t.responseStatus !== TurnStatus.PromptFiltered);

		// In inline mode, collapse all prior requests into one synthetic user turn;
		// the responses are assumed to already be reflected in the current code.
		if (this.props.inline && turns.length > 0) {
			const rollup = `The current code is a result of a previous interaction with you. Here are my previous messages: \n- ${turns.map(r => r.request.message).join('\n- ')}`;
			turns = [new Turn(undefined, { message: rollup, type: 'user' }, undefined)];
		}

		const messages: (UserMessage | AssistantMessage)[] = [];
		for (const [index, turn] of turns.entries()) {
			if (turn.request.type === 'user') {
				const vars = (turn.promptVariables && !this.props.omitPromptVariables)
					? this.removeDuplicateVars(turn.promptVariables, this.props.currentTurnVars, turns.slice(index + 1))
					: new ChatVariablesCollection([]);
				messages.push(<ChatVariablesAndQuery priority={900} chatVariables={vars} query={turn.request.message} omitReferences={true} embeddedInsideUserMessage={false} />);
			}
			if (turn.responseMessage?.type === 'model' && ![TurnStatus.OffTopic, TurnStatus.Filtered].includes(turn.responseStatus)) {
				messages.push(<AssistantMessage name={turn.responseMessage.name}>{turn.responseMessage.message}</AssistantMessage>);
			}
		}

		return (
			// Conversation history is currently limited to 32k tokens to avoid
			// unnecessarily pushing into the larger and slower token SKUs
			<TokenLimit max={32768}>
				<PrioritizedList priority={this.props.priority} descending={false}>{messages}</PrioritizedList>
			</TokenLimit>
		);
	}

	private removeDuplicateVars(historyVars: ChatVariablesCollection, currentTurnVars: ChatVariablesCollection | undefined, followingMessages: Turn[]): ChatVariablesCollection {
		// TODO this is very simple, maybe we could use getUniqueReferences to merge ranges and be smarter. But it would take some rewriting of history for the model to
		// understand what each history message was referring to.
		return historyVars.filter(candidate => {
			// Drop the variable if a later turn attaches it again…
			const repeatedLater = followingMessages.some(m => m.promptVariables?.find(other => variableEquals(candidate, other)));
			if (repeatedLater) {
				return false;
			}
			// …or if the current turn already carries it.
			return !currentTurnVars?.find(other => variableEquals(candidate, other));
		});
	}
}
119
120
/**
 * Whether two prompt variables refer to the same thing: identical unique name
 * and an equal URI or Location value. Values of any other type never compare equal.
 */
function variableEquals(v1: PromptVariable, v2: PromptVariable) {
	if (v1.uniqueName === v2.uniqueName) {
		if (URI.isUri(v1.value) && URI.isUri(v2.value)) {
			return v1.value.toString() === v2.value.toString();
		}
		if (v1.value instanceof Location && v2.value instanceof Location) {
			// Structural comparison: Location has no canonical string form here.
			return JSON.stringify(v1.value) === JSON.stringify(v2.value);
		}
	}
	return false;
}
135
136
export interface ConversationHistoryWithToolsProps extends BasePromptElementProps {
	/** Priority given to the PrioritizedList that wraps the rendered history. */
	readonly priority: number;
	/** Context whose `history` turns (and pending tool call results) are rendered. */
	readonly promptContext: IBuildPromptContext;
}
140
141
/**
 * This is conversation history including tool calls, but not summaries. New usages should use SummarizedConversationHistory instead.
 */
export class ConversationHistoryWithTools extends PromptElement<ConversationHistoryWithToolsProps> {
	override async render(state: void, sizing: PromptSizing) {
		const history: PromptElement[] = [];
		const contextHistory = this.props.promptContext.history;
		for (const [i, turn] of contextHistory.entries()) {
			const metadata = turn.responseChatResult?.metadata as IResultMetadata | undefined;

			// Prefer the user message exactly as it was rendered for the original
			// request; otherwise fall back to re-rendering the turn.
			if (metadata?.renderedUserMessage) {
				history.push(<UserMessage><Chunk>{renderedMessageToTsxChildren(metadata.renderedUserMessage, false)}</Chunk></UserMessage>);
			} else {
				history.push(<AgentUserMessageInHistory turn={turn} />);
			}

			if (Array.isArray(metadata?.toolCallRounds) && metadata.toolCallRounds?.length > 0) {
				// If a tool call limit is exceeded, the tool call from this turn will
				// have been aborted and any result should be found in the next turn.
				const toolCallResultInNextTurn = metadata.maxToolCallsExceeded;
				let toolCallResults = metadata.toolCallResults;
				if (toolCallResultInNextTurn) {
					// For the final historical turn the "next turn" is the one being
					// built now, so its results come from the live prompt context
					// rather than from recorded metadata.
					const nextMetadata = contextHistory.at(i + 1)?.responseChatResult?.metadata as IResultMetadata | undefined;
					const mergeFrom = i === contextHistory.length - 1 ? this.props.promptContext.toolCallResults : nextMetadata?.toolCallResults;
					toolCallResults = { ...toolCallResults, ...mergeFrom };
				}

				// isHistorical is false only for an aborted call on the last turn,
				// whose result belongs to the in-progress turn.
				history.push(<ChatToolCalls
					promptContext={this.props.promptContext}
					toolCallRounds={metadata.toolCallRounds}
					toolCallResults={toolCallResults}
					isHistorical={!(toolCallResultInNextTurn && i === contextHistory.length - 1)}
				/>);
			} else if (turn.responseMessage) {
				history.push(<AssistantMessage>{turn.responseMessage?.message}</AssistantMessage>);
			}
		}

		return (<PrioritizedList priority={this.props.priority} descending={false}>{history}</PrioritizedList>);
	}
}
182
183