// Source: microsoft/vscode — extensions/copilot/src/extension/byok/vscode-node/test/geminiNativeProvider.spec.ts
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import { beforeEach, describe, expect, it, vi } from 'vitest';
import * as vscode from 'vscode';
import { NoopOTelService, resolveOTelConfig } from '../../../../platform/otel/common/index';
import type { CapturingToken } from '../../../../platform/requestLogger/common/capturingToken';
import type { IRequestLogger } from '../../../../platform/requestLogger/common/requestLogger';
import { NullTelemetryService } from '../../../../platform/telemetry/common/nullTelemetryService';
import { TestLogService } from '../../../../platform/testing/common/testLogService';
import type { IBYOKStorageService } from '../byokStorageService';
15
const mockHandleAPIKeyUpdate = vi.fn();
16
17
vi.mock('@google/genai', () => {
18
class MockGoogleGenAI {
19
public static createdWithApiKeys: string[] = [];
20
public static streamChunks: any[] = [];
21
public static listModelsResult: AsyncIterable<any> = (async function* () { })();
22
23
public readonly apiKey: string;
24
public readonly models: {
25
list: () => Promise<AsyncIterable<any>>;
26
generateContentStream: (params: unknown) => Promise<AsyncIterable<any>>;
27
};
28
29
constructor(opts: { apiKey: string }) {
30
this.apiKey = opts.apiKey;
31
MockGoogleGenAI.createdWithApiKeys.push(opts.apiKey);
32
this.models = {
33
list: async () => MockGoogleGenAI.listModelsResult,
34
generateContentStream: async () => (async function* () {
35
for (const c of MockGoogleGenAI.streamChunks) {
36
yield c;
37
}
38
})()
39
};
40
}
41
}
42
43
return {
44
GoogleGenAI: MockGoogleGenAI,
45
Type: { OBJECT: 'object' },
46
};
47
});
48
49
vi.mock('../../common/byokProvider', async (importOriginal) => {
50
const actual = await importOriginal<typeof import('../../common/byokProvider')>();
51
return {
52
...actual,
53
handleAPIKeyUpdate: mockHandleAPIKeyUpdate,
54
};
55
});
56
57
type ProgressItem = vscode.LanguageModelResponsePart2;
58
59
class TestProgress implements vscode.Progress<ProgressItem> {
60
public readonly items: ProgressItem[] = [];
61
report(value: ProgressItem): void {
62
this.items.push(value);
63
}
64
}
65
66
function createStorageService(overrides?: Partial<IBYOKStorageService>): IBYOKStorageService {
67
return {
68
getAPIKey: vi.fn().mockResolvedValue(undefined),
69
storeAPIKey: vi.fn().mockResolvedValue(undefined),
70
deleteAPIKey: vi.fn().mockResolvedValue(undefined),
71
getStoredModelConfigs: vi.fn().mockResolvedValue({}),
72
saveModelConfig: vi.fn().mockResolvedValue(undefined),
73
removeModelConfig: vi.fn().mockResolvedValue(undefined),
74
...overrides,
75
};
76
}
77
78
function createRequestLogger(): IRequestLogger {
79
const didChangeEmitter = new vscode.EventEmitter<void>();
80
return {
81
_serviceBrand: undefined,
82
promptRendererTracing: false,
83
captureInvocation: async <T>(_request: CapturingToken, fn: () => Promise<T>) => fn(),
84
logToolCall: () => undefined,
85
logModelListCall: () => undefined,
86
logChatRequest: () => ({
87
markTimeToFirstToken: () => undefined,
88
resolveWithCancelation: () => undefined,
89
resolve: () => undefined,
90
}),
91
addPromptTrace: () => undefined,
92
addEntry: () => undefined,
93
onDidChangeRequests: didChangeEmitter.event,
94
getRequests: () => [],
95
enableWorkspaceEditTracing: () => undefined,
96
disableWorkspaceEditTracing: () => undefined,
97
} as unknown as IRequestLogger;
98
}
99
100
describe('GeminiNativeBYOKLMProvider', () => {
101
beforeEach(() => {
102
vi.clearAllMocks();
103
});
104
105
it.skip('throws a clear error when no API key is configured (no silent return)', async () => {
106
const { GeminiNativeBYOKLMProvider } = await import('../geminiNativeProvider');
107
const storage = createStorageService({ getAPIKey: vi.fn().mockResolvedValue(undefined) });
108
const provider = new GeminiNativeBYOKLMProvider(undefined, storage, new TestLogService(), createRequestLogger(), new NullTelemetryService(), new NoopOTelService(resolveOTelConfig({ env: {}, extensionVersion: '1.0.0', sessionId: 'test' })));
109
110
const model: vscode.LanguageModelChatInformation = {
111
id: 'gemini-2.0-flash',
112
name: 'Gemini 2.0 Flash',
113
family: 'Gemini',
114
version: '1.0.0',
115
maxInputTokens: 1000,
116
maxOutputTokens: 1000,
117
capabilities: { toolCalling: false, imageInput: false }
118
};
119
const messages: vscode.LanguageModelChatMessage[] = [
120
new vscode.LanguageModelChatMessage(vscode.LanguageModelChatMessageRole.User, 'hello')
121
];
122
123
const tokenSource = new vscode.CancellationTokenSource();
124
const progress = new TestProgress();
125
await expect(provider.provideLanguageModelChatResponse(
126
model,
127
messages,
128
{ requestInitiator: 'test', tools: [], toolMode: vscode.LanguageModelChatToolMode.Auto },
129
progress,
130
tokenSource.token
131
)).rejects.toThrow(/No API key configured/i);
132
});
133
134
// it.skip('initializes the Gemini client on API key update and can stream a response', async () => {
135
// const { GeminiNativeBYOKLMProvider } = await import('../geminiNativeProvider');
136
// const genai = await import('@google/genai');
137
// const MockGoogleGenAI = genai.GoogleGenAI as unknown as { createdWithApiKeys: string[]; streamChunks: any[] };
138
// MockGoogleGenAI.createdWithApiKeys.length = 0;
139
// MockGoogleGenAI.streamChunks.length = 0;
140
// MockGoogleGenAI.streamChunks.push({
141
// candidates: [{
142
// content: { parts: [{ text: 'Hello from Gemini' }] }
143
// }]
144
// });
145
146
// mockHandleAPIKeyUpdate.mockResolvedValue({ apiKey: 'k_test', deleted: false, cancelled: false });
147
148
// const storage = createStorageService({ getAPIKey: vi.fn().mockResolvedValue('k_test') });
149
// const provider = new GeminiNativeBYOKLMProvider(undefined, storage, new TestLogService(), createRequestLogger());
150
151
// await provider.updateAPIKey();
152
// expect(MockGoogleGenAI.createdWithApiKeys).toEqual(['k_test']);
153
154
// const model: vscode.LanguageModelChatInformation = {
155
// id: 'gemini-2.0-flash',
156
// name: 'Gemini 2.0 Flash',
157
// family: 'Gemini',
158
// version: '1.0.0',
159
// maxInputTokens: 1000,
160
// maxOutputTokens: 1000,
161
// capabilities: { toolCalling: false, imageInput: false }
162
// };
163
// const messages: vscode.LanguageModelChatMessage[] = [
164
// new vscode.LanguageModelChatMessage(vscode.LanguageModelChatMessageRole.User, 'hello')
165
// ];
166
167
// const tokenSource = new vscode.CancellationTokenSource();
168
// const progress = new TestProgress();
169
// await provider.provideLanguageModelChatResponse(
170
// model,
171
// messages,
172
// { requestInitiator: 'test', tools: [], toolMode: vscode.LanguageModelChatToolMode.Auto },
173
// progress,
174
// tokenSource.token
175
// );
176
177
// expect(progress.items.some(p => p instanceof vscode.LanguageModelTextPart && p.value.includes('Hello from Gemini'))).toBe(true);
178
// });
179
180
// it.skip('clears the client when API key is deleted via update flow', async () => {
181
// const { GeminiNativeBYOKLMProvider } = await import('../geminiNativeProvider');
182
// const genai = await import('@google/genai');
183
// const MockGoogleGenAI = genai.GoogleGenAI as unknown as { createdWithApiKeys: string[]; streamChunks: any[] };
184
// MockGoogleGenAI.createdWithApiKeys.length = 0;
185
// MockGoogleGenAI.streamChunks.length = 0;
186
187
// const storage = createStorageService({ getAPIKey: vi.fn().mockResolvedValue(undefined) });
188
// const provider = new GeminiNativeBYOKLMProvider(undefined, storage, new TestLogService(), createRequestLogger());
189
190
// // First set a key
191
// mockHandleAPIKeyUpdate.mockResolvedValueOnce({ apiKey: 'k_initial', deleted: false, cancelled: false });
192
// await provider.updateAPIKey();
193
// expect(MockGoogleGenAI.createdWithApiKeys).toEqual(['k_initial']);
194
195
// // Then delete it
196
// mockHandleAPIKeyUpdate.mockResolvedValueOnce({ apiKey: undefined, deleted: true, cancelled: false });
197
// await provider.updateAPIKey();
198
199
// const model: vscode.LanguageModelChatInformation = {
200
// id: 'gemini-2.0-flash',
201
// name: 'Gemini 2.0 Flash',
202
// family: 'Gemini',
203
// version: '1.0.0',
204
// maxInputTokens: 1000,
205
// maxOutputTokens: 1000,
206
// capabilities: { toolCalling: false, imageInput: false }
207
// };
208
// const messages: vscode.LanguageModelChatMessage[] = [
209
// new vscode.LanguageModelChatMessage(vscode.LanguageModelChatMessageRole.User, 'hello')
210
// ];
211
212
// const tokenSource = new vscode.CancellationTokenSource();
213
// const progress = new TestProgress();
214
// await expect(provider.provideLanguageModelChatResponse(
215
// model,
216
// messages,
217
// { requestInitiator: 'test', tools: [], toolMode: vscode.LanguageModelChatToolMode.Auto },
218
// progress,
219
// tokenSource.token
220
// )).rejects.toThrow(/No API key configured/i);
221
// });
222
223
it.skip('prompts for a new API key when listing models fails with an invalid key', async () => {
224
const { GeminiNativeBYOKLMProvider } = await import('../geminiNativeProvider');
225
const genai = await import('@google/genai');
226
const MockGoogleGenAI = genai.GoogleGenAI as unknown as { listModelsResult: AsyncIterable<any> };
227
// Simulate the models.list() call throwing an invalid API key error when iterated
228
MockGoogleGenAI.listModelsResult = (async function* () {
229
throw new Error('ApiError: {"error":{"message":"API key not valid. Please pass a valid API key.","details":[{"reason":"API_KEY_INVALID"}]}}');
230
})();
231
232
const storage = createStorageService({
233
getAPIKey: vi.fn().mockResolvedValue('bad_key'),
234
});
235
236
mockHandleAPIKeyUpdate.mockResolvedValue({ apiKey: undefined, deleted: false, cancelled: true });
237
238
const provider = new GeminiNativeBYOKLMProvider(undefined, storage, new TestLogService(), createRequestLogger(), new NullTelemetryService(), new NoopOTelService(resolveOTelConfig({ env: {}, extensionVersion: '1.0.0', sessionId: 'test' })));
239
const tokenSource = new vscode.CancellationTokenSource();
240
const models = await provider.provideLanguageModelChatInformation({ silent: false }, tokenSource.token);
241
242
// When the key is invalid, we should re-prompt for a new one
243
// and handle the failure gracefully by returning an empty list.
244
expect(models).toEqual([]);
245
expect(mockHandleAPIKeyUpdate).toHaveBeenCalled();
246
});
247
248
it.skip('retries listing models after re-prompting with a valid API key', async () => {
249
const { GeminiNativeBYOKLMProvider } = await import('../geminiNativeProvider');
250
const genai = await import('@google/genai');
251
const MockGoogleGenAI = genai.GoogleGenAI as unknown as { listModelsResult: AsyncIterable<any> };
252
253
let iterationCount = 0;
254
let hasThrown = false;
255
const modelId = 'test-model';
256
257
MockGoogleGenAI.listModelsResult = {
258
async *[Symbol.asyncIterator]() {
259
iterationCount++;
260
if (!hasThrown) {
261
hasThrown = true;
262
throw new Error('ApiError: {"error":{"message":"API key not valid. Please pass a valid API key.","details":[{"reason":"API_KEY_INVALID"}]}}');
263
}
264
yield { name: modelId };
265
}
266
};
267
268
const storage = createStorageService({
269
getAPIKey: vi.fn().mockResolvedValue('bad_key'),
270
});
271
272
mockHandleAPIKeyUpdate.mockResolvedValue({ apiKey: 'k_new', deleted: false, cancelled: false });
273
274
const knownModels = {
275
[modelId]: {
276
name: 'Test Model',
277
maxInputTokens: 1000,
278
maxOutputTokens: 1000,
279
toolCalling: false,
280
vision: false
281
}
282
};
283
284
const provider = new GeminiNativeBYOKLMProvider(knownModels, storage, new TestLogService(), createRequestLogger(), new NullTelemetryService(), new NoopOTelService(resolveOTelConfig({ env: {}, extensionVersion: '1.0.0', sessionId: 'test' })));
285
const tokenSource = new vscode.CancellationTokenSource();
286
const models = await provider.provideLanguageModelChatInformation({ silent: false }, tokenSource.token);
287
288
// First attempt should fail with invalid key, then after re-prompting
289
// we should retry listing models and succeed with the new key.
290
expect(models.map(m => m.id)).toEqual([modelId]);
291
expect(iterationCount).toBe(2);
292
expect(mockHandleAPIKeyUpdate).toHaveBeenCalled();
293
});
294
});
295
296