Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
microsoft
GitHub Repository: microsoft/vscode
Path: blob/main/extensions/copilot/src/extension/chatSessions/claude/node/claudePromptResolver.ts
13405 views
1
/*---------------------------------------------------------------------------------------------
2
* Copyright (c) Microsoft Corporation. All rights reserved.
3
* Licensed under the MIT License. See License.txt in the project root for license information.
4
*--------------------------------------------------------------------------------------------*/
5
6
import Anthropic from '@anthropic-ai/sdk';
7
import type * as vscode from 'vscode';
8
import { isLocation } from '../../../../util/common/types';
9
import { URI } from '../../../../util/vs/base/common/uri';
10
import { ChatReferenceBinaryData } from '../../../../vscodeTypes';
11
import { toAnthropicImageMediaType } from './sessionParser/claudeSessionSchema';
12
13
// #region Prompt Resolution
14
15
/**
 * Renders a URI for inclusion in prompt text.
 *
 * `file:` URIs are shown as a plain filesystem path; every other scheme keeps
 * its full stringified URI form.
 */
function uriToString(uri: URI): string {
	if (uri.scheme === 'file') {
		return uri.fsPath;
	}
	return uri.toString();
}
18
19
/**
20
* Converts a `vscode.ChatRequest` into an array of Anthropic content blocks.
21
*
22
* - Inline references (`ref.range`) are substituted directly into the prompt text.
23
* - Non-inline references are appended as a `<system-reminder>` text block.
24
* - Binary image references become `image` content blocks.
25
* - Slash-command prompts (starting with `/`) are passed through unmodified.
26
*/
27
export async function resolvePromptToContentBlocks(request: vscode.ChatRequest): Promise<Anthropic.ContentBlockParam[]> {
28
if (request.prompt.startsWith('/')) {
29
return [{ type: 'text', text: request.prompt }];
30
}
31
32
let prompt = request.prompt;
33
const imageBlocks: Anthropic.ContentBlockParam[] = [];
34
const extraRefsTexts: string[] = [];
35
36
// Sort references with inline ranges by descending start position so that
37
// earlier replacements don't shift the indices of later ones.
38
const sortedRefs = [...request.references].sort((a, b) => {
39
const aStart = a.range?.[0] ?? -1;
40
const bStart = b.range?.[0] ?? -1;
41
return bStart - aStart;
42
});
43
44
for (const ref of sortedRefs) {
45
let refValue = ref.value;
46
if (refValue instanceof ChatReferenceBinaryData) {
47
const mediaType = toAnthropicImageMediaType(refValue.mimeType);
48
if (mediaType) {
49
const data = await refValue.data();
50
imageBlocks.push({
51
type: 'image',
52
source: {
53
type: 'base64',
54
data: Buffer.from(data).toString('base64'),
55
media_type: mediaType
56
}
57
});
58
continue;
59
}
60
if (!refValue.reference) {
61
continue;
62
}
63
refValue = refValue.reference;
64
}
65
66
const valueText = URI.isUri(refValue)
67
? uriToString(refValue)
68
: isLocation(refValue)
69
? `${uriToString(refValue.uri)}:${refValue.range.start.line + 1}`
70
: undefined;
71
if (valueText) {
72
if (ref.range) {
73
prompt = prompt.slice(0, ref.range[0]) + valueText + prompt.slice(ref.range[1]);
74
} else {
75
extraRefsTexts.push(`- ${valueText}`);
76
}
77
}
78
}
79
80
const contentBlocks: Anthropic.ContentBlockParam[] = [
81
{ type: 'text', text: request.command ? `/${request.command} ${prompt}` : prompt },
82
...imageBlocks,
83
];
84
85
if (extraRefsTexts.length > 0) {
86
contentBlocks.push({
87
type: 'text',
88
text: `<system-reminder>\nThe user provided the following references:\n${extraRefsTexts.join('\n')}\n\nIMPORTANT: this context may or may not be relevant to your tasks. You should not respond to this context unless it is highly relevant to your task.\n</system-reminder>`
89
});
90
}
91
92
return contentBlocks;
93
}
94
95
// #endregion
96
97