Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
microsoft
GitHub Repository: microsoft/vscode
Path: blob/main/extensions/copilot/src/extension/byok/node/test/openAIEndpoint.spec.ts
13405 views
1
/*---------------------------------------------------------------------------------------------
2
* Copyright (c) Microsoft Corporation. All rights reserved.
3
* Licensed under the MIT License. See License.txt in the project root for license information.
4
*--------------------------------------------------------------------------------------------*/
5
6
import { Raw } from '@vscode/prompt-tsx';
7
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
8
import { ConfigKey, IConfigurationService } from '../../../../platform/configuration/common/configurationService';
9
import { IChatModelInformation, ModelSupportedEndpoint } from '../../../../platform/endpoint/common/endpointProvider';
10
import { ICreateEndpointBodyOptions } from '../../../../platform/networking/common/networking';
11
import { ITestingServicesAccessor } from '../../../../platform/test/node/services';
12
import { DisposableStore } from '../../../../util/vs/base/common/lifecycle';
13
import { IInstantiationService } from '../../../../util/vs/platform/instantiation/common/instantiation';
14
import { createExtensionUnitTestingServices } from '../../../test/node/services';
15
import { OpenAIEndpoint } from '../openAIEndpoint';
16
17
// Test fixtures for thinking content
18
const createThinkingMessage = (thinkingId: string, thinkingText: string): Raw.ChatMessage => ({
19
role: Raw.ChatRole.Assistant,
20
content: [
21
{
22
type: Raw.ChatCompletionContentPartKind.Opaque,
23
value: {
24
type: 'thinking',
25
thinking: {
26
id: thinkingId,
27
text: thinkingText
28
}
29
}
30
}
31
]
32
});
33
34
const createTestOptions = (messages: Raw.ChatMessage[]): ICreateEndpointBodyOptions => ({
35
debugName: 'test',
36
messages,
37
requestId: 'test-req-123',
38
postOptions: {},
39
finishedCb: undefined,
40
location: undefined as any
41
});
42
43
// Verifies how OpenAIEndpoint.createRequestBody serializes assistant "thinking"
// (chain-of-thought) content in its two wire formats: legacy Chat Completions
// (CAPI) vs the Responses API.
describe('OpenAIEndpoint - Reasoning Properties', () => {
	let modelMetadata: IChatModelInformation;
	// Shared store; cleared (not disposed) after each test so it can be reused.
	const disposables = new DisposableStore();
	let accessor: ITestingServicesAccessor;
	let instaService: IInstantiationService;

	beforeEach(() => {
		// Baseline metadata: advertises BOTH endpoints and thinking support.
		// Individual tests narrow supported_endpoints or flip `thinking` off.
		modelMetadata = {
			id: 'test-model',
			name: 'Test Model',
			vendor: 'Test Vendor',
			version: '1.0',
			model_picker_enabled: true,
			is_chat_default: false,
			is_chat_fallback: false,
			supported_endpoints: [ModelSupportedEndpoint.ChatCompletions, ModelSupportedEndpoint.Responses],
			capabilities: {
				type: 'chat',
				family: 'openai',
				tokenizer: 'o200k_base' as any,
				supports: {
					parallel_tool_calls: false,
					streaming: true,
					tool_calls: false,
					vision: false,
					prediction: false,
					thinking: true
				},
				limits: {
					max_prompt_tokens: 4096,
					max_output_tokens: 2048,
					max_context_window_tokens: 6144
				}
			}
		};

		// Fresh testing accessor per test; ownership goes to `disposables`.
		const testingServiceCollection = createExtensionUnitTestingServices();
		accessor = disposables.add(testingServiceCollection.createTestingAccessor());
		instaService = accessor.get(IInstantiationService);
	});

	afterEach(() => {
		disposables.clear();
	});

	describe('CAPI mode (useResponsesApi = false)', () => {
		it('should set cot_id and cot_summary properties when processing thinking content', () => {
			// Restrict to ChatCompletions only so the endpoint takes the CAPI path.
			const endpoint = instaService.createInstance(OpenAIEndpoint,
				{
					...modelMetadata,
					supported_endpoints: [ModelSupportedEndpoint.ChatCompletions]
				},
				'test-api-key',
				'https://api.openai.com/v1/chat/completions');

			const thinkingMessage = createThinkingMessage('test-thinking-123', 'this is my reasoning');
			const options = createTestOptions([thinkingMessage]);

			const body = endpoint.createRequestBody(options);

			// In CAPI mode the thinking payload is flattened onto the message
			// as cot_id / cot_summary fields.
			expect(body.messages).toBeDefined();
			const messages = body.messages as any[];
			expect(messages).toHaveLength(1);
			expect(messages[0].cot_id).toBe('test-thinking-123');
			expect(messages[0].cot_summary).toBe('this is my reasoning');
		});

		it('should handle multiple messages with thinking content', () => {
			const endpoint = instaService.createInstance(OpenAIEndpoint,
				{
					...modelMetadata,
					supported_endpoints: [ModelSupportedEndpoint.ChatCompletions]
				},
				'test-api-key',
				'https://api.openai.com/v1/chat/completions');

			const userMessage: Raw.ChatMessage = {
				role: Raw.ChatRole.User,
				content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'Hello' }]
			};
			const thinkingMessage = createThinkingMessage('reasoning-456', 'complex reasoning here');
			const options = createTestOptions([userMessage, thinkingMessage]);

			const body = endpoint.createRequestBody(options);

			expect(body.messages).toBeDefined();
			const messages = body.messages as any[];
			expect(messages).toHaveLength(2);

			// User message should not have thinking properties
			expect(messages[0].cot_id).toBeUndefined();
			expect(messages[0].cot_summary).toBeUndefined();

			// Assistant message should have thinking properties
			expect(messages[1].cot_id).toBe('reasoning-456');
			expect(messages[1].cot_summary).toBe('complex reasoning here');
		});
	});

	describe('Responses API mode (useResponsesApi = true)', () => {
		it('should preserve reasoning object when thinking is supported', () => {
			// Enabling a reasoning summary level opts the request into the
			// Responses API reasoning block.
			accessor.get(IConfigurationService).setConfig(ConfigKey.ResponsesApiReasoningSummary, 'detailed');
			const endpoint = instaService.createInstance(OpenAIEndpoint,
				modelMetadata,
				'test-api-key',
				'https://api.openai.com/v1/chat/completions');

			const thinkingMessage = createThinkingMessage('resp-api-789', 'responses api reasoning');
			const options = createTestOptions([thinkingMessage]);

			const body = endpoint.createRequestBody(options);

			// Responses API bodies drop chat-completions-only fields (n,
			// stream_options) and set store=true.
			expect(body.store).toBe(true);
			expect(body.n).toBeUndefined();
			expect(body.stream_options).toBeUndefined();
			expect(body.reasoning).toBeDefined(); // Should preserve reasoning object
		});

		it('should remove reasoning object when thinking is not supported', () => {
			// Same metadata but with capability supports.thinking = false.
			const modelWithoutThinking = {
				...modelMetadata,
				capabilities: {
					...modelMetadata.capabilities,
					supports: {
						...modelMetadata.capabilities.supports,
						thinking: false
					}
				}
			};

			accessor.get(IConfigurationService).setConfig(ConfigKey.ResponsesApiReasoningSummary, 'detailed');
			const endpoint = instaService.createInstance(OpenAIEndpoint,
				modelWithoutThinking,
				'test-api-key',
				'https://api.openai.com/v1/chat/completions');

			const thinkingMessage = createThinkingMessage('no-thinking-999', 'should be removed');
			const options = createTestOptions([thinkingMessage]);

			const body = endpoint.createRequestBody(options);

			expect(body.reasoning).toBeUndefined(); // Should be removed
		});
	});
});
188