Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
microsoft
GitHub Repository: microsoft/vscode
Path: blob/main/extensions/copilot/src/extension/agents/node/adapters/openaiAdapterForSTests.ts
13405 views
1
/*---------------------------------------------------------------------------------------------
2
* Copyright (c) Microsoft Corporation. All rights reserved.
3
* Licensed under the MIT License. See License.txt in the project root for license information.
4
*--------------------------------------------------------------------------------------------*/
5
6
import { Raw } from '@vscode/prompt-tsx';
7
import { ChatCompletionContentPartKind } from '@vscode/prompt-tsx/dist/base/output/rawTypes';
8
import * as http from 'http';
9
import { ChatCompletionChunk, ChatCompletionCreateParamsBase, ChatCompletionMessageParam } from 'openai/resources/chat/completions.js';
10
import type { OpenAiFunctionTool } from '../../../../platform/networking/common/fetch';
11
import { IMakeChatRequestOptions } from '../../../../platform/networking/common/networking';
12
import { APIUsage } from '../../../../platform/networking/common/openai';
13
import { coalesce } from '../../../../util/vs/base/common/arrays';
14
import { IAgentStreamBlock, IParsedRequest, IProtocolAdapter, IProtocolAdapterFactory, IStreamEventData, IStreamingContext } from './types';
15
16
/**
 * Factory producing OpenAI-protocol adapters for simulation tests.
 *
 * Hook arrays are shared by reference with every adapter created here, so
 * hooks registered via {@link addHooks} after an adapter was created still
 * apply to that adapter's future requests/responses.
 */
export class OpenAIAdapterFactoryForSTests implements IProtocolAdapterFactory {
	private readonly requestHooks: Array<(body: string) => string> = [];
	private readonly responseHooks: Array<(body: string) => string> = [];

	createAdapter(): IProtocolAdapter {
		return new OpenAIAdapterForSTests(this.requestHooks, this.responseHooks);
	}

	/**
	 * Registers optional request/response rewrite hooks; only the hooks that
	 * are actually provided get recorded.
	 */
	public addHooks(requestHook?: (body: string) => string, responseHook?: (body: string) => string): void {
		if (requestHook) {
			this.requestHooks.push(requestHook);
		}
		if (responseHook) {
			this.responseHooks.push(responseHook);
		}
	}
}
31
32
/**
 * OpenAI-protocol adapter used by simulation tests.
 *
 * Parses OpenAI chat-completions request bodies into the internal Raw
 * message format and emits `chat.completion.chunk` SSE events. The hook
 * functions injected by the factory rewrite string payloads on the way in
 * (request hooks) and out (response hooks) — presumably to substitute file
 * paths in test fixtures; confirm against the factory's callers.
 */
class OpenAIAdapterForSTests implements IProtocolAdapter {
	readonly name = 'openai';

	// Per-request state
	// Index assigned to the next emitted choice / tool-call delta.
	private currentBlockIndex = 0;
	// True while text deltas are being accumulated for the current block.
	private hasTextBlock = false;
	// True once any tool call was streamed; selects the final finish_reason.
	private hadToolCalls = false;
	constructor(private readonly requestHooks: ((body: string) => string)[], private readonly responseHooks: ((body: string) => string)[] = []) {
		// No-op for test adapter
	}

	/**
	 * Parses an OpenAI chat-completions request body into an IParsedRequest.
	 *
	 * Note: request hooks run twice — once over the raw JSON string before
	 * parsing, and again over each text/image/opaque content part after
	 * conversion. Hooks are therefore expected to be idempotent.
	 */
	parseRequest(body: string): IParsedRequest {
		body = this.requestHooks.reduce((b, hook) => hook(b), body);
		const requestBody: ChatCompletionCreateParamsBase = JSON.parse(body);

		// Extract model information
		const model = requestBody.model;

		// Convert messages format if needed
		const runHooks = (msg: string) => {
			return this.requestHooks.reduce((b, hook) => hook(b), msg);
		};
		const messages = responseApiInputToRawMessages(requestBody.messages);
		messages.forEach(msg => {
			msg.content.forEach(part => {
				switch (part.type) {
					case ChatCompletionContentPartKind.Image: {
						part.imageUrl.url = runHooks(part.imageUrl.url);
						break;
					}
					case ChatCompletionContentPartKind.Opaque: {
						// Only string-valued opaque parts can be rewritten.
						if (typeof part.value === 'string') {
							part.value = runHooks(part.value);
						}
						break;
					}
					case ChatCompletionContentPartKind.Text: {
						part.text = runHooks(part.text);
						break;
					}
				}
			});
		});

		// max_tokens is preferred; max_completion_tokens is the newer alias.
		const options: IMakeChatRequestOptions['requestOptions'] = {
			temperature: (requestBody.temperature ?? undefined),
			max_tokens: (requestBody.max_tokens ?? requestBody.max_completion_tokens) ?? undefined,
		};

		if (requestBody.tools && Array.isArray(requestBody.tools) && requestBody.tools.length > 0) {
			// Map OpenAI tools to VS Code chat tools; non-function tools are dropped.
			const tools = coalesce(requestBody.tools.map((tool) => {
				if (tool.type === 'function' && tool.function) {
					const chatTool: OpenAiFunctionTool = {
						type: 'function',
						function: {
							name: tool.function.name,
							description: tool.function.description || '',
							parameters: tool.function.parameters || {},
						}
					};
					return chatTool;
				}
				return undefined;
			}));
			if (tools.length) {
				options.tools = tools;
			}
		}

		return {
			model,
			messages,
			options
		};
	}


	// Accumulated text per requestId; flushed as one delta so response hooks
	// see complete strings (e.g. whole file paths) rather than fragments.
	private readonly textMessages = new Map<string, string>();

	// Appends a text fragment to the buffer for this request.
	private collectTextContent(context: IStreamingContext, content: string): void {
		const existing = this.textMessages.get(context.requestId) || '';
		this.textMessages.set(context.requestId, existing + content);
	}
	/**
	 * Drains the buffered text for this request (if any), applies response
	 * hooks to the full string, and wraps it in a single content-delta chunk.
	 * Returns undefined when no text was collected.
	 */
	private getCollectedTextContent(context: IStreamingContext): IStreamEventData | undefined {
		let content = this.textMessages.get(context.requestId);
		if (typeof content !== 'string') {
			return undefined;
		}
		this.textMessages.delete(context.requestId);
		content = this.responseHooks.reduce((b, hook) => hook(b), content);

		// Send text delta events
		const event = {
			id: context.requestId,
			object: 'chat.completion.chunk',
			created: Math.floor(Date.now() / 1000),
			model: context.endpoint.modelId,
			choices: [{
				index: this.currentBlockIndex,
				delta: {
					content,
					role: 'assistant'
				},
				finish_reason: null
			}]
		} satisfies ChatCompletionChunk;

		return {
			event: 'message',
			data: this.formatEventData(event)
		};
	}
	/**
	 * Converts one agent stream block into zero or more OpenAI chunk events.
	 * Text blocks are buffered (no event yet); a tool call first flushes any
	 * open text block, then emits a tool-call delta and advances the block index.
	 */
	formatStreamResponse(
		streamData: IAgentStreamBlock,
		context: IStreamingContext
	): IStreamEventData[] {
		const events: IStreamEventData[] = [];

		if (streamData.type === 'text') {
			if (!this.hasTextBlock) {
				this.hasTextBlock = true;
			}

			// Collect all of the strings, as there could be references to file paths.
			// At the end of the stream, we will send a single event with the full text & have file paths replaced.
			this.collectTextContent(context, streamData.content);
		} else if (streamData.type === 'tool_call') {
			// End current text block if it exists
			if (this.hasTextBlock) {
				const event = this.getCollectedTextContent(context);
				if (event) {
					events.push(event);
				}
				this.currentBlockIndex++;
				this.hasTextBlock = false;
			}

			this.hadToolCalls = true;

			// Arguments can contain file paths.
			const toolArguments = this.responseHooks.reduce((b, hook) => hook(b), JSON.stringify(streamData.input || {}));

			// Send tool call events
			const toolCallDelta: ChatCompletionChunk = {
				id: context.requestId,
				object: 'chat.completion.chunk',
				created: Math.floor(Date.now() / 1000),
				model: context.endpoint.modelId,
				choices: [{
					index: this.currentBlockIndex,
					delta: {
						tool_calls: [{
							index: this.currentBlockIndex,
							id: streamData.callId,
							type: 'function',
							function: {
								name: streamData.name,
								arguments: toolArguments
							}
						}]
					},
					finish_reason: null
				}]
			};
			events.push({
				event: 'message',
				data: this.formatEventData(toolCallDelta)
			});

			this.currentBlockIndex++;
		}

		return events;
	}

	/**
	 * Flushes any remaining buffered text, then emits the terminal chunk with
	 * finish_reason ('tool_calls' if any tool call was streamed, else 'stop')
	 * and usage figures (zeros when no usage was reported).
	 */
	generateFinalEvents(context: IStreamingContext, usage?: APIUsage): IStreamEventData[] {
		const events: IStreamEventData[] = [];

		const event = this.getCollectedTextContent(context);
		if (event) {
			events.push(event);
		}

		// Send final completion event with usage information
		const finalCompletion = {
			id: context.requestId,
			object: 'chat.completion.chunk',
			created: Math.floor(Date.now() / 1000),
			model: context.endpoint.modelId,
			choices: [{
				index: 0,
				delta: { content: null },
				finish_reason: this.hadToolCalls ? 'tool_calls' : 'stop'
			}],
			usage: usage ? {
				prompt_tokens: usage.prompt_tokens,
				completion_tokens: usage.completion_tokens,
				total_tokens: usage.total_tokens
			} : {
				prompt_tokens: 0,
				completion_tokens: 0,
				total_tokens: 0
			}
		} satisfies ChatCompletionChunk;

		events.push({
			event: 'message',
			data: this.formatEventData(finalCompletion)
		});

		return events;
	}

	generateInitialEvents(context: IStreamingContext): IStreamEventData[] {
		// OpenAI doesn't typically send initial events, but we can send an empty one if needed
		return [];
	}

	// SSE content type for the streaming response.
	getContentType(): string {
		return 'text/event-stream';
	}

	// Extracts the token from an "Authorization: Bearer <token>" header, if present.
	extractAuthKey(headers: http.IncomingHttpHeaders): string | undefined {
		const authHeader = headers.authorization;
		const bearerSpace = 'Bearer ';
		return authHeader?.startsWith(bearerSpace) ? authHeader.substring(bearerSpace.length) : undefined;
	}

	// Serializes one chunk as a single-line SSE data payload. (JSON.stringify
	// already escapes newlines, so the replace is a defensive no-op.)
	private formatEventData(data: unknown): string {
		return JSON.stringify(data).replace(/\n/g, '\\n');
	}
}
265
function responseApiInputToRawMessages(messages: ChatCompletionMessageParam[]): Raw.ChatMessage[] {
266
const raw: Raw.ChatMessage[] = [];
267
268
// Helper to push or merge consecutive messages of same role
269
const pushOrMerge = (msg: Raw.ChatMessage) => {
270
const last = raw[raw.length - 1];
271
if (last && last.role === msg.role && last.role !== Raw.ChatRole.Tool) {
272
// Merge content arrays
273
last.content.push(...msg.content);
274
// Merge tool calls if assistant
275
if (last.role === Raw.ChatRole.Assistant && msg.role === Raw.ChatRole.Assistant && msg.toolCalls) {
276
const l = last as Raw.AssistantChatMessage;
277
l.toolCalls = [...(l.toolCalls || []), ...((msg as Raw.AssistantChatMessage).toolCalls || [])];
278
}
279
} else {
280
raw.push(msg);
281
}
282
};
283
284
messages.forEach(m => {
285
// Collect content parts
286
const contentParts: Raw.ChatCompletionContentPart[] = [];
287
288
// OpenAI message content can be string or ChatCompletionContentPart[]
289
(Array.isArray(m.content) ? m.content : []).forEach(part => {
290
switch (part.type) {
291
case 'text': {
292
contentParts.push({ type: Raw.ChatCompletionContentPartKind.Text, text: part.text });
293
break;
294
}
295
case 'image_url': {
296
contentParts.push({ imageUrl: { url: part.image_url.url, detail: part.image_url.detail as unknown as ('low' | 'high' | undefined) }, type: ChatCompletionContentPartKind.Image });
297
break;
298
}
299
case 'file': {
300
contentParts.push({ type: ChatCompletionContentPartKind.Opaque, value: `[File Input - Filename: ${part.file.filename}]` });
301
break;
302
}
303
case 'refusal': {
304
// Refusal parts contain a 'refusal' field; access defensively
305
contentParts.push({ type: Raw.ChatCompletionContentPartKind.Text, text: `[Refusal: ${part.refusal || ''}]` });
306
break;
307
}
308
case 'input_audio':
309
default: {
310
// Unknown part
311
}
312
}
313
});
314
if (typeof m.content === 'string') {
315
contentParts.push({ type: Raw.ChatCompletionContentPartKind.Text, text: m.content });
316
}
317
318
switch (m.role) {
319
case 'user': {
320
pushOrMerge({ role: Raw.ChatRole.User, content: contentParts });
321
return;
322
}
323
case 'tool': {
324
// contentParts.splice(0, contentParts.length);
325
raw.push({ role: Raw.ChatRole.Tool, content: contentParts, toolCallId: m.tool_call_id || '' });
326
return;
327
328
}
329
case 'assistant': {
330
const toolCalls: Raw.ChatMessageToolCall[] = (m.tool_calls || []).map(tc => {
331
try {
332
if (tc.type === 'function') {
333
return {
334
id: tc.id || tc.function.name || 'tool_call',
335
type: 'function',
336
function: {
337
name: tc.function.name || 'unknown_function',
338
arguments: typeof tc.function.arguments === 'string' ? tc.function.arguments : JSON.stringify(tc.function.arguments ?? {})
339
}
340
} satisfies Raw.ChatMessageToolCall;
341
}
342
} catch { }
343
// Fallback minimal tool call
344
return { id: 'tool_call', type: 'function', function: { name: 'unknown_function', arguments: '{}' } } satisfies Raw.ChatMessageToolCall;
345
});
346
const message: Raw.AssistantChatMessage = { role: Raw.ChatRole.Assistant, content: contentParts };
347
if (toolCalls.length) {
348
message.toolCalls = toolCalls;
349
}
350
pushOrMerge(message);
351
return;
352
}
353
case 'system':
354
case 'developer': {
355
// System (and any unexpected) messages
356
pushOrMerge({ role: Raw.ChatRole.System, content: contentParts, name: m.name });
357
return;
358
}
359
default: {
360
return;
361
}
362
}
363
});
364
365
return raw;
366
}
367
368
369