Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
microsoft
GitHub Repository: microsoft/vscode
Path: blob/main/extensions/copilot/src/extension/prompt/node/test/chatMLFetcherResponseApiTelemetry.spec.ts
13405 views
1
/*---------------------------------------------------------------------------------------------
2
* Copyright (c) Microsoft Corporation. All rights reserved.
3
* Licensed under the MIT License. See License.txt in the project root for license information.
4
*--------------------------------------------------------------------------------------------*/
5
6
import { Raw } from '@vscode/prompt-tsx';
7
import type { OpenAI } from 'openai';
8
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
9
import { IAuthenticationService } from '../../../../platform/authentication/common/authentication';
10
import { CopilotToken } from '../../../../platform/authentication/common/copilotToken';
11
import { IFetchMLOptions } from '../../../../platform/chat/common/chatMLFetcher';
12
import { IChatQuotaService } from '../../../../platform/chat/common/chatQuotaService';
13
import { ChatLocation } from '../../../../platform/chat/common/commonTypes';
14
import { IInteractionService } from '../../../../platform/chat/common/interactionService';
15
import { DefaultsOnlyConfigurationService } from '../../../../platform/configuration/common/defaultsOnlyConfigurationService';
16
import { InMemoryConfigurationService } from '../../../../platform/configuration/test/common/inMemoryConfigurationService';
17
import { ICAPIClientService } from '../../../../platform/endpoint/common/capiClient';
18
import { MockAuthenticationService } from '../../../../platform/ignore/node/test/mockAuthenticationService';
19
import { MockCAPIClientService } from '../../../../platform/ignore/node/test/mockCAPIClientService';
20
import { ILogService } from '../../../../platform/log/common/logService';
21
import { FinishedCallback } from '../../../../platform/networking/common/fetch';
22
import { FetcherId, IFetcherService, IHeaders, Response } from '../../../../platform/networking/common/fetcherService';
23
import { IChatEndpoint, IEndpointBody } from '../../../../platform/networking/common/networking';
24
import { NullChatWebSocketManager } from '../../../../platform/networking/node/chatWebSocketManager';
25
import { NoopOTelService } from '../../../../platform/otel/common/noopOtelService';
26
import { resolveOTelConfig } from '../../../../platform/otel/common/otelConfig';
27
import { NullRequestLogger } from '../../../../platform/requestLogger/node/nullRequestLogger';
28
import { NullExperimentationService } from '../../../../platform/telemetry/common/nullExperimentationService';
29
import { ITelemetryService } from '../../../../platform/telemetry/common/telemetry';
30
import { TelemetryData } from '../../../../platform/telemetry/common/telemetryData';
31
import { SpyingTelemetryService } from '../../../../platform/telemetry/node/spyingTelemetryService';
32
import { TestLogService } from '../../../../platform/testing/common/testLogService';
33
import { InstantiationServiceBuilder } from '../../../../util/common/services';
34
import { CancellationToken, CancellationTokenSource } from '../../../../util/vs/base/common/cancellation';
35
import { Event } from '../../../../util/vs/base/common/event';
36
import { DisposableStore } from '../../../../util/vs/base/common/lifecycle';
37
import { IInstantiationService } from '../../../../util/vs/platform/instantiation/common/instantiation';
38
import { IPowerService, NullPowerService } from '../../../power/common/powerService';
39
import { ChatMLFetcherImpl } from '../chatMLFetcher';
40
41
describe('ChatMLFetcherImpl Response API telemetry', () => {
42
let disposables: DisposableStore;
43
let fetcher: ChatMLFetcherImpl;
44
let mockFetcherService: MockFetcherService;
45
let spyingTelemetryService: SpyingTelemetryService;
46
let cancellationTokenSource: CancellationTokenSource;
47
48
beforeEach(() => {
49
disposables = new DisposableStore();
50
cancellationTokenSource = disposables.add(new CancellationTokenSource());
51
52
mockFetcherService = new MockFetcherService();
53
spyingTelemetryService = new SpyingTelemetryService();
54
const configurationService = new InMemoryConfigurationService(new DefaultsOnlyConfigurationService());
55
56
const logService = new TestLogService();
57
const experimentationService = new NullExperimentationService();
58
59
fetcher = new ChatMLFetcherImpl(
60
mockFetcherService as unknown as IFetcherService,
61
spyingTelemetryService,
62
new NullRequestLogger(),
63
logService,
64
new TestAuthenticationService() as unknown as IAuthenticationService,
65
createMockInteractionService(),
66
createMockChatQuotaService(),
67
new TestCAPIClientService() as unknown as ICAPIClientService,
68
createMockConversationOptions(),
69
configurationService,
70
experimentationService,
71
createMockPowerService(),
72
new InstantiationServiceBuilder([
73
[IFetcherService, mockFetcherService as unknown as IFetcherService],
74
[ITelemetryService, spyingTelemetryService],
75
[ICAPIClientService, new TestCAPIClientService() as unknown as ICAPIClientService],
76
]).seal() as unknown as IInstantiationService,
77
new NullChatWebSocketManager(),
78
new NoopOTelService(resolveOTelConfig({ env: {}, extensionVersion: '0.0.0', sessionId: 'test' })),
79
);
80
});
81
82
afterEach(() => {
83
disposables.dispose();
84
});
85
86
it('logs non-empty messagesJson for Response API requests (input field)', async () => {
87
// Create an endpoint that returns Response API format (input instead of messages)
88
const responseApiEndpoint = createResponseApiEndpoint();
89
90
mockFetcherService.queueResponse(createSuccessResponse('Hello!'));
91
92
const opts: IFetchMLOptions = {
93
debugName: 'test-response-api',
94
messages: [{ role: Raw.ChatRole.User, content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'Hello from Response API' }] }],
95
endpoint: responseApiEndpoint,
96
location: ChatLocation.Panel,
97
requestOptions: {},
98
finishedCb: undefined,
99
};
100
101
await fetcher.fetchMany(opts, cancellationTokenSource.token);
102
103
// Find the engine.messages telemetry event
104
const events = spyingTelemetryService.getEvents();
105
const engineMessagesEvents = events.telemetryServiceEvents.filter(
106
e => e.eventName === 'engine.messages'
107
);
108
109
expect(engineMessagesEvents.length).toBeGreaterThan(0);
110
111
// Find the input telemetry event (the one sent for input messages)
112
// Input events are sent in the .finally() block of _fetchWithInstrumentation
113
// which happens before the response processing that sends output events
114
const inputTelemetry = engineMessagesEvents[0]; // First event should be the input
115
expect(inputTelemetry).toBeDefined();
116
const inputProperties = inputTelemetry!.properties as Record<string, string>;
117
expect(inputProperties.messagesJson).toBeDefined();
118
119
// Parse the messagesJson and verify it's not empty
120
const messagesJson = JSON.parse(inputProperties.messagesJson);
121
expect(messagesJson.length).toBeGreaterThan(0);
122
123
// Verify the message content was properly converted from Response API input format
124
expect(messagesJson[0].role).toBe('user');
125
});
126
127
it('logs empty messagesJson when ChatCompletion API messages array is empty', async () => {
128
// Create an endpoint that returns ChatCompletion API format with empty messages
129
const chatCompletionEndpoint = createChatCompletionEndpointWithEmptyMessages();
130
131
mockFetcherService.queueResponse(createSuccessResponse('Hello!'));
132
133
const opts: IFetchMLOptions = {
134
debugName: 'test-empty-messages',
135
messages: [], // Empty messages
136
endpoint: chatCompletionEndpoint,
137
location: ChatLocation.Panel,
138
requestOptions: {},
139
finishedCb: undefined,
140
};
141
142
await fetcher.fetchMany(opts, cancellationTokenSource.token);
143
144
// Find the engine.messages telemetry event
145
const events = spyingTelemetryService.getEvents();
146
const engineMessagesEvents = events.telemetryServiceEvents.filter(
147
e => e.eventName === 'engine.messages'
148
);
149
150
// First event should be the input messages telemetry
151
const inputTelemetry = engineMessagesEvents[0];
152
153
if (inputTelemetry) {
154
// For ChatCompletion API with empty messages, messagesJson should be "[]"
155
const props = inputTelemetry.properties as Record<string, string>;
156
const messagesJson = JSON.parse(props.messagesJson);
157
expect(messagesJson.length).toBe(0);
158
}
159
});
160
});
161
162
// --- Test Helpers ---
163
164
/**
165
* Creates an endpoint that returns Response API format request body (with input instead of messages)
166
*/
167
function createResponseApiEndpoint(): IChatEndpoint {
168
return {
169
url: 'https://api.github.com/copilot/chat/responses',
170
urlOrRequestMetadata: 'https://api.github.com/copilot/chat/responses',
171
model: 'gpt-5-mini',
172
modelMaxPromptTokens: 8192,
173
maxOutputTokens: 4096,
174
supportsToolCalls: true,
175
supportsVision: false,
176
supportsPrediction: false,
177
showInModelPicker: true,
178
isDefault: true,
179
isFallback: false,
180
policy: 'enabled',
181
getHeaders: async () => ({}),
182
// This is the key part - return Response API format with input instead of messages
183
createRequestBody: (): IEndpointBody => {
184
const body: IEndpointBody & OpenAI.Responses.ResponseCreateParams = {
185
model: 'gpt-5-mini',
186
stream: true,
187
// Response API uses 'input' instead of 'messages'
188
input: [
189
{
190
role: 'user',
191
content: [{ type: 'input_text', text: 'Hello from Response API' }]
192
}
193
],
194
// No 'messages' field - this is what distinguishes Response API
195
};
196
return body;
197
},
198
acquireTokenizer: () => ({
199
countMessagesTokens: async () => 100,
200
countTokens: async () => 100,
201
tokenize: async () => [],
202
}),
203
processResponseFromChatEndpoint: async (_telemetryService: ITelemetryService, _logService: ILogService, response: Response, _expectedNumChoices: number, finishedCb: FinishedCallback, telemetryData: TelemetryData, _cancellationToken?: CancellationToken) => {
204
const text = await response.text();
205
if (finishedCb) {
206
await finishedCb(text, 0, { text });
207
}
208
return {
209
[Symbol.asyncIterator]: async function* () {
210
yield {
211
message: { role: Raw.ChatRole.Assistant, content: [{ type: Raw.ChatCompletionContentPartKind.Text, text }] },
212
choiceIndex: 0,
213
requestId: {
214
headerRequestId: response.headers.get('x-request-id') || 'test-request-id',
215
gitHubRequestId: response.headers.get('x-github-request-id') || '',
216
completionId: '',
217
created: 0,
218
serverExperiments: '',
219
deploymentId: '',
220
},
221
tokens: [],
222
usage: undefined,
223
model: 'gpt-5-mini',
224
blockFinished: true,
225
finishReason: 'stop',
226
telemetryData: telemetryData,
227
};
228
}
229
};
230
},
231
acceptChatPolicy: async () => true,
232
doRequest: async () => {
233
throw new Error('Not implemented');
234
},
235
} as unknown as IChatEndpoint;
236
}
237
238
/**
239
* Creates an endpoint that returns ChatCompletion API format with empty messages
240
*/
241
function createChatCompletionEndpointWithEmptyMessages(): IChatEndpoint {
242
return {
243
url: 'https://api.github.com/copilot/chat/completions',
244
urlOrRequestMetadata: 'https://api.github.com/copilot/chat/completions',
245
model: 'test-model',
246
modelMaxPromptTokens: 8192,
247
maxOutputTokens: 4096,
248
supportsToolCalls: true,
249
supportsVision: false,
250
supportsPrediction: false,
251
showInModelPicker: true,
252
isDefault: true,
253
isFallback: false,
254
policy: 'enabled',
255
getHeaders: async () => ({}),
256
createRequestBody: (): IEndpointBody => ({
257
model: 'test-model',
258
messages: [], // Empty messages array - ChatCompletion API format
259
stream: true
260
}),
261
acquireTokenizer: () => ({
262
countMessagesTokens: async () => 100,
263
countTokens: async () => 100,
264
tokenize: async () => [],
265
}),
266
processResponseFromChatEndpoint: async (_telemetryService: ITelemetryService, _logService: ILogService, response: Response, _expectedNumChoices: number, finishedCb: FinishedCallback, telemetryData: TelemetryData, _cancellationToken?: CancellationToken) => {
267
const text = await response.text();
268
if (finishedCb) {
269
await finishedCb(text, 0, { text });
270
}
271
return {
272
[Symbol.asyncIterator]: async function* () {
273
yield {
274
message: { role: Raw.ChatRole.Assistant, content: [{ type: Raw.ChatCompletionContentPartKind.Text, text }] },
275
choiceIndex: 0,
276
requestId: {
277
headerRequestId: response.headers.get('x-request-id') || 'test-request-id',
278
gitHubRequestId: response.headers.get('x-github-request-id') || '',
279
completionId: '',
280
created: 0,
281
serverExperiments: '',
282
deploymentId: '',
283
},
284
tokens: [],
285
usage: undefined,
286
model: 'test-model',
287
blockFinished: true,
288
finishReason: 'stop',
289
telemetryData: telemetryData,
290
};
291
}
292
};
293
},
294
acceptChatPolicy: async () => true,
295
doRequest: async () => {
296
throw new Error('Not implemented');
297
},
298
} as unknown as IChatEndpoint;
299
}
300
301
/**
 * Minimal stand-in for IFetcherService: hands out queued responses/errors in
 * FIFO order, counts fetch calls, and stubs out the remaining surface.
 */
class MockFetcherService {
	private readonly _queued: (Response | Error)[] = [];
	private _fetches = 0;

	/** Number of times fetch() has been invoked so far. */
	get fetchCallCount(): number {
		return this._fetches;
	}

	queueResponse(response: Response): void {
		this._queued.push(response);
	}

	queueError(error: Error): void {
		this._queued.push(error);
	}

	async fetch(_url: string, _options?: unknown): Promise<Response> {
		this._fetches += 1;
		const next = this._queued.shift();
		if (next === undefined) {
			throw new Error('No more queued responses');
		}
		if (next instanceof Error) {
			throw next;
		}
		return next;
	}

	fetchWithPagination<T>(): Promise<T[]> {
		throw new Error('Method not implemented.');
	}

	disconnectAll(): Promise<void> {
		return Promise.resolve();
	}

	makeAbortController(): AbortController {
		return new AbortController();
	}

	isAbortError(_err: unknown): boolean {
		return false;
	}

	isInternetDisconnectedError(_err: unknown): boolean {
		return false;
	}

	/** An error counts as a fetcher error when it carries a `code` property. */
	isFetcherError(err: unknown): boolean {
		return err instanceof Error && 'code' in err;
	}

	getUserMessageForFetcherError(_err: unknown): string {
		return 'Network error occurred';
	}

	getUserAgentLibrary(): string {
		return 'test-agent';
	}
}
362
class TestAuthenticationService extends MockAuthenticationService {
363
override getCopilotToken(_force?: boolean): Promise<CopilotToken> {
364
return Promise.resolve({
365
token: 'test-token',
366
username: 'test-user',
367
} as CopilotToken);
368
}
369
}
370
371
/** CAPI client mock that reports a fixed ping URL. */
class TestCAPIClientService extends MockCAPIClientService {
	get capiPingURL(): string {
		const pingUrl = 'https://api.github.com/copilot_internal/ping';
		return pingUrl;
	}
}
377
function createMockInteractionService(): IInteractionService {
378
return {
379
_serviceBrand: undefined,
380
onInteractionStateChanged: Event.None,
381
sendChatInteraction: () => { },
382
getInteractionState: () => undefined,
383
interactionId: 'test-interaction-id',
384
} as unknown as IInteractionService;
385
}
386
387
function createMockChatQuotaService(): IChatQuotaService {
388
return {
389
_serviceBrand: undefined,
390
processQuotaHeaders: () => { },
391
} as unknown as IChatQuotaService;
392
}
393
394
function createMockConversationOptions() {
395
return {
396
_serviceBrand: undefined,
397
maxResponseTokens: 4096,
398
temperature: 0.5,
399
topP: 1,
400
rejectionMessage: 'rejected',
401
};
402
}
403
404
function createMockPowerService(): IPowerService {
405
return new NullPowerService();
406
}
407
408
/**
 * Minimal IHeaders implementation backed by a Map.
 *
 * HTTP header names are case-insensitive, so keys are normalized to lower
 * case on construction and `get` lower-cases the requested name before
 * lookup. (Previously only the lookup name was lower-cased, so headers
 * supplied with non-lowercase keys could never be retrieved.)
 */
class FakeHeaders implements IHeaders {
	private readonly headers: Map<string, string>;

	constructor(headers = new Map<string, string>()) {
		// Normalize keys so get('Content-Type') finds ['content-type', …]
		// regardless of the casing the caller used.
		this.headers = new Map([...headers].map(([key, value]): [string, string] => [key.toLowerCase(), value]));
	}

	/** Returns the header value, or null when the header is absent. */
	get(name: string): string | null {
		return this.headers.get(name.toLowerCase()) ?? null;
	}

	*[Symbol.iterator](): Iterator<[string, string]> {
		yield* this.headers.entries();
	}
}
418
function createSuccessResponse(content: string): Response {
419
const streamContent = `data: {"choices":[{"delta":{"content":"${content}"},"index":0}]}\n\ndata: {"choices":[{"delta":{},"finish_reason":"stop","index":0}]}\n\ndata: [DONE]\n\n`;
420
return Response.fromText(
421
200,
422
'OK',
423
new FakeHeaders(new Map([
424
['content-type', 'text/event-stream'],
425
['x-request-id', 'test-request-id'],
426
])),
427
streamContent,
428
'node-fetch' as FetcherId
429
);
430
}
431
432