GitHub Repository: microsoft/vscode
Path: blob/main/src/vs/workbench/api/common/extHostLanguageModels.ts
/*---------------------------------------------------------------------------------------------
 *  Copyright (c) Microsoft Corporation. All rights reserved.
 *  Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import type * as vscode from 'vscode';
import { AsyncIterableProducer, AsyncIterableSource, RunOnceScheduler } from '../../../base/common/async.js';
import { VSBuffer } from '../../../base/common/buffer.js';
import { CancellationToken } from '../../../base/common/cancellation.js';
import { SerializedError, transformErrorForSerialization, transformErrorFromSerialization } from '../../../base/common/errors.js';
import { Emitter, Event } from '../../../base/common/event.js';
import { Iterable } from '../../../base/common/iterator.js';
import { IDisposable, toDisposable } from '../../../base/common/lifecycle.js';
import { URI, UriComponents } from '../../../base/common/uri.js';
import { localize } from '../../../nls.js';
import { ExtensionIdentifier, ExtensionIdentifierMap, ExtensionIdentifierSet, IExtensionDescription } from '../../../platform/extensions/common/extensions.js';
import { createDecorator } from '../../../platform/instantiation/common/instantiation.js';
import { ILogService } from '../../../platform/log/common/log.js';
import { Progress } from '../../../platform/progress/common/progress.js';
import { IChatMessage, IChatResponsePart, ILanguageModelChatInfoOptions, ILanguageModelChatMetadata, ILanguageModelChatMetadataAndIdentifier } from '../../contrib/chat/common/languageModels.js';
import { DEFAULT_MODEL_PICKER_CATEGORY } from '../../contrib/chat/common/widget/input/modelPickerWidget.js';
import { INTERNAL_AUTH_PROVIDER_PREFIX } from '../../services/authentication/common/authentication.js';
import { checkProposedApiEnabled, isProposedApiEnabled } from '../../services/extensions/common/extensions.js';
import { SerializableObjectWithBuffers } from '../../services/extensions/common/proxyIdentifier.js';
import { ExtHostLanguageModelsShape, MainContext, MainThreadLanguageModelsShape } from './extHost.protocol.js';
import { IExtHostAuthentication } from './extHostAuthentication.js';
import { IExtHostRpcService } from './extHostRpcService.js';
import * as typeConvert from './extHostTypeConverters.js';
import * as extHostTypes from './extHostTypes.js';
import { ChatAgentLocation } from '../../contrib/chat/common/constants.js';

export interface IExtHostLanguageModels extends ExtHostLanguageModels { }

export const IExtHostLanguageModels = createDecorator<IExtHostLanguageModels>('IExtHostLanguageModels');

type LanguageModelProviderData = {
	readonly extension: IExtensionDescription;
	readonly provider: vscode.LanguageModelChatProvider;
};

type LMResponsePart = vscode.LanguageModelTextPart | vscode.LanguageModelToolCallPart | vscode.LanguageModelDataPart | vscode.LanguageModelThinkingPart;


class LanguageModelResponse {

	readonly apiObject: vscode.LanguageModelChatResponse;

	private readonly _defaultStream = new AsyncIterableSource<LMResponsePart>();
	private _isDone: boolean = false;

	constructor() {

		const that = this;

		const [stream1, stream2] = AsyncIterableProducer.tee(that._defaultStream.asyncIterable);

		this.apiObject = {
			// result: promise,
			get stream() {
				return stream1;
			},
			get text() {
				return stream2.map(part => {
					if (part instanceof extHostTypes.LanguageModelTextPart) {
						return part.value;
					} else {
						return undefined;
					}
				}).coalesce();
			},
		};
	}

	handleResponsePart(parts: IChatResponsePart | IChatResponsePart[]): void {
		if (this._isDone) {
			return;
		}

		const lmResponseParts: LMResponsePart[] = [];

		for (const part of Iterable.wrap(parts)) {

			let out: LMResponsePart;
			if (part.type === 'text') {
				out = new extHostTypes.LanguageModelTextPart(part.value, part.audience);
			} else if (part.type === 'thinking') {
				out = new extHostTypes.LanguageModelThinkingPart(part.value, part.id, part.metadata);
			} else if (part.type === 'data') {
				out = new extHostTypes.LanguageModelDataPart(part.data.buffer, part.mimeType, part.audience);
			} else {
				out = new extHostTypes.LanguageModelToolCallPart(part.toolCallId, part.name, part.parameters);
			}
			lmResponseParts.push(out);
		}

		this._defaultStream.emitMany(lmResponseParts);
	}

	reject(err: Error): void {
		this._isDone = true;
		this._defaultStream.reject(err);
	}

	resolve(): void {
		this._isDone = true;
		this._defaultStream.resolve();
	}
}
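
/*
 * Usage sketch: `LanguageModelResponse.apiObject` is what an extension ultimately sees as a
 * `vscode.LanguageModelChatResponse`. A minimal consumer, assuming the public `stream`/`text`
 * response shape:
 *
 *   for await (const part of response.stream) {
 *       if (part instanceof vscode.LanguageModelTextPart) {
 *           console.log(part.value);
 *       } else if (part instanceof vscode.LanguageModelToolCallPart) {
 *           console.log(`tool call: ${part.name}`);
 *       }
 *   }
 *
 * `response.text` is the same stream reduced to plain text chunks, mirroring the `get text()`
 * getter above.
 */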


export class ExtHostLanguageModels implements ExtHostLanguageModelsShape {

	declare _serviceBrand: undefined;

	private static _idPool = 1;

	private readonly _proxy: MainThreadLanguageModelsShape;
	private readonly _onDidChangeModelAccess = new Emitter<{ from: ExtensionIdentifier; to: ExtensionIdentifier }>();
	private readonly _onDidChangeProviders = new Emitter<void>();
	readonly onDidChangeProviders = this._onDidChangeProviders.event;
	private readonly _onDidChangeModelProxyAvailability = new Emitter<void>();
	readonly onDidChangeModelProxyAvailability = this._onDidChangeModelProxyAvailability.event;

	private readonly _languageModelProviders = new Map<string, LanguageModelProviderData>();
	// TODO @lramos15 - Remove the need for both info and metadata as it's a lot of redundancy. Should just need one
	private readonly _localModels = new Map<string, { group: string | undefined; metadata: ILanguageModelChatMetadata; info: vscode.LanguageModelChatInformation }>();
	private readonly _modelAccessList = new ExtensionIdentifierMap<ExtensionIdentifierSet>();
	private readonly _pendingRequest = new Map<number, { languageModelId: string; res: LanguageModelResponse }>();
	private readonly _ignoredFileProviders = new Map<number, vscode.LanguageModelIgnoredFileProvider>();
	private _languageModelProxyProvider: vscode.LanguageModelProxyProvider | undefined;

	constructor(
		@IExtHostRpcService extHostRpc: IExtHostRpcService,
		@ILogService private readonly _logService: ILogService,
		@IExtHostAuthentication private readonly _extHostAuthentication: IExtHostAuthentication,
	) {
		this._proxy = extHostRpc.getProxy(MainContext.MainThreadLanguageModels);
	}

	dispose(): void {
		this._onDidChangeModelAccess.dispose();
		this._onDidChangeProviders.dispose();
		this._onDidChangeModelProxyAvailability.dispose();
	}

	registerLanguageModelChatProvider(extension: IExtensionDescription, vendor: string, provider: vscode.LanguageModelChatProvider): IDisposable {

		this._languageModelProviders.set(vendor, { extension: extension, provider });
		this._proxy.$registerLanguageModelProvider(vendor);

		let providerChangeEventDisposable: IDisposable | undefined;
		if (provider.onDidChangeLanguageModelChatInformation) {
			providerChangeEventDisposable = provider.onDidChangeLanguageModelChatInformation(() => {
				this._proxy.$onLMProviderChange(vendor);
			});
		}

		return toDisposable(() => {
			this._languageModelProviders.delete(vendor);
			this._localModels.forEach((value, key) => {
				if (value.metadata.vendor === vendor) {
					this._localModels.delete(key);
				}
			});
			providerChangeEventDisposable?.dispose();
			this._proxy.$unregisterProvider(vendor);
		});
	}
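
	/*
	 * Provider sketch (illustrative only, assuming the proposed `chatProvider` API surface this method
	 * backs; the extension-facing registration call is an assumption, the three provider methods are the
	 * ones invoked elsewhere in this file):
	 *
	 *   const provider: vscode.LanguageModelChatProvider = {
	 *       async provideLanguageModelChatInformation(options, token) {
	 *           // Return the models this vendor currently offers.
	 *           return [{ id: 'my-model', name: 'My Model', family: 'my-family', version: '1.0.0',
	 *               maxInputTokens: 128000, maxOutputTokens: 4096, capabilities: {} }];
	 *       },
	 *       async provideLanguageModelChatResponse(model, messages, options, progress, token) {
	 *           // Report parts; they are queued and forwarded to the main thread in $startChatRequest.
	 *           progress.report(new vscode.LanguageModelTextPart('Hello!'));
	 *       },
	 *       async provideTokenCount(model, text, token) {
	 *           return typeof text === 'string' ? Math.ceil(text.length / 4) : 1; // rough estimate
	 *       }
	 *   };
	 *   // assumed entry point mirroring this method: vscode.lm.registerLanguageModelChatProvider('my-vendor', provider);
	 */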

	private toModelIdentifier(vendor: string, group: string | undefined, modelId: string): string {
		return group ? `${vendor}/${group}/${modelId}` : `${vendor}/${modelId}`;
	}

	private getVendorFromModelIdentifier(modelIdentifier: string): string | undefined {
		const firstSlash = modelIdentifier.indexOf('/');
		return firstSlash === -1 ? undefined : modelIdentifier.substring(0, firstSlash);
	}

	async $provideLanguageModelChatInfo(vendor: string, options: ILanguageModelChatInfoOptions, token: CancellationToken): Promise<ILanguageModelChatMetadataAndIdentifier[]> {
		const data = this._languageModelProviders.get(vendor);
		if (!data) {
			return [];
		}
		const modelInformation: vscode.LanguageModelChatInformation[] = await data.provider.provideLanguageModelChatInformation({ silent: options.silent, configuration: options.configuration }, token) ?? [];
		const modelMetadataAndIdentifier: ILanguageModelChatMetadataAndIdentifier[] = modelInformation.map((m): ILanguageModelChatMetadataAndIdentifier => {
			let auth;
			if (m.requiresAuthorization && isProposedApiEnabled(data.extension, 'chatProvider')) {
				auth = {
					providerLabel: data.extension.displayName || data.extension.name,
					accountLabel: typeof m.requiresAuthorization === 'object' ? m.requiresAuthorization.label : undefined
				};
			}
			if (m.capabilities?.editTools) {
				checkProposedApiEnabled(data.extension, 'chatProvider');
			}

			const isDefaultForLocation: { [K in ChatAgentLocation]?: boolean } = {};
			if (isProposedApiEnabled(data.extension, 'chatProvider')) {
				if (m.isDefault === true) {
					for (const key of Object.values(ChatAgentLocation)) {
						if (typeof key === 'string') {
							isDefaultForLocation[key as ChatAgentLocation] = true;
						}
					}
				} else if (typeof m.isDefault === 'object') {
					for (const key of Object.keys(m.isDefault)) {
						const enumKey = parseInt(key) as extHostTypes.ChatLocation;
						isDefaultForLocation[typeConvert.ChatLocation.from(enumKey)] = m.isDefault[enumKey];
					}
				}
			}

			return {
				metadata: {
					extension: data.extension.identifier,
					id: m.id,
					vendor,
					name: m.name ?? '',
					family: m.family ?? '',
					detail: m.detail,
					tooltip: m.tooltip,
					version: m.version,
					multiplier: m.multiplier,
					multiplierNumeric: m.multiplierNumeric,
					maxInputTokens: m.maxInputTokens,
					maxOutputTokens: m.maxOutputTokens,
					auth,
					isDefaultForLocation,
					isUserSelectable: m.isUserSelectable,
					statusIcon: m.statusIcon,
					modelPickerCategory: m.category ?? DEFAULT_MODEL_PICKER_CATEGORY,
					capabilities: m.capabilities ? {
						vision: m.capabilities.imageInput,
						editTools: m.capabilities.editTools,
						toolCalling: !!m.capabilities.toolCalling,
						agentMode: !!m.capabilities.toolCalling
					} : undefined,
				},
				identifier: this.toModelIdentifier(vendor, options.group, m.id)
			};
		});

		this._localModels.forEach((value, key) => {
			if (value.metadata.vendor === vendor && value.group === options.group) {
				this._localModels.delete(key);
			}
		});

		for (let i = 0; i < modelMetadataAndIdentifier.length; i++) {
			this._localModels.set(modelMetadataAndIdentifier[i].identifier, {
				group: options.group,
				metadata: modelMetadataAndIdentifier[i].metadata,
				info: modelInformation[i]
			});
		}

		return modelMetadataAndIdentifier;
	}
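
	/*
	 * Shape sketch of a single `LanguageModelChatInformation` entry as consumed above (only fields read
	 * in this method are shown; treat the values as illustrative, not normative):
	 *
	 *   {
	 *       id: 'gpt-model',                  // becomes `<vendor>/<group?>/<id>` via toModelIdentifier
	 *       name: 'GPT Model',
	 *       family: 'gpt',
	 *       version: '1.0.0',
	 *       maxInputTokens: 128000,
	 *       maxOutputTokens: 8192,
	 *       capabilities: { imageInput: true, toolCalling: true },
	 *       isDefault: true,                  // or a per-ChatAgentLocation map (proposed API)
	 *       requiresAuthorization: { label: 'My Account' }
	 *   }
	 */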

	async $startChatRequest(modelId: string, requestId: number, from: ExtensionIdentifier, messages: SerializableObjectWithBuffers<IChatMessage[]>, options: vscode.LanguageModelChatRequestOptions, token: CancellationToken): Promise<void> {
		const knownModel = this._localModels.get(modelId);
		if (!knownModel) {
			throw new Error('Model not found');
		}

		const data = this._languageModelProviders.get(knownModel.metadata.vendor);
		if (!data) {
			throw new Error(`Language model provider for '${knownModel.metadata.id}' not found.`);
		}

		const queue: IChatResponsePart[] = [];
		const sendNow = () => {
			if (queue.length > 0) {
				this._proxy.$reportResponsePart(requestId, new SerializableObjectWithBuffers(queue));
				queue.length = 0;
			}
		};
		const queueScheduler = new RunOnceScheduler(sendNow, 30);
		const sendSoon = (part: IChatResponsePart) => {
			const newLen = queue.push(part);
			// flush/send if things pile up more than expected
			if (newLen > 30) {
				sendNow();
				queueScheduler.cancel();
			} else {
				queueScheduler.schedule();
			}
		};

		const progress = new Progress<vscode.LanguageModelTextPart | vscode.LanguageModelToolCallPart | vscode.LanguageModelDataPart | vscode.LanguageModelThinkingPart>(async fragment => {
			if (token.isCancellationRequested) {
				this._logService.warn(`[CHAT](${data.extension.identifier.value}) CANNOT send progress because the REQUEST IS CANCELLED`);
				return;
			}

			let part: IChatResponsePart | undefined;
			if (fragment instanceof extHostTypes.LanguageModelToolCallPart) {
				part = { type: 'tool_use', name: fragment.name, parameters: fragment.input, toolCallId: fragment.callId };
			} else if (fragment instanceof extHostTypes.LanguageModelTextPart) {
				part = { type: 'text', value: fragment.value, audience: fragment.audience };
			} else if (fragment instanceof extHostTypes.LanguageModelDataPart) {
				part = { type: 'data', mimeType: fragment.mimeType, data: VSBuffer.wrap(fragment.data), audience: fragment.audience };
			} else if (fragment instanceof extHostTypes.LanguageModelThinkingPart) {
				part = { type: 'thinking', value: fragment.value, id: fragment.id, metadata: fragment.metadata };
			}

			if (!part) {
				this._logService.warn(`[CHAT](${data.extension.identifier.value}) UNKNOWN part ${JSON.stringify(fragment)}`);
				return;
			}

			sendSoon(part);
		});

		let value: unknown;

		try {
			value = data.provider.provideLanguageModelChatResponse(
				knownModel.info,
				messages.value.map(typeConvert.LanguageModelChatMessage2.to),
				{ ...options, modelOptions: options.modelOptions ?? {}, requestInitiator: ExtensionIdentifier.toKey(from), toolMode: options.toolMode ?? extHostTypes.LanguageModelChatToolMode.Auto },
				progress,
				token
			);

		} catch (err) {
			// synchronously failed
			throw err;
		}

		Promise.resolve(value).then(() => {
			sendNow();
			this._proxy.$reportResponseDone(requestId, undefined);
		}, err => {
			sendNow();
			this._proxy.$reportResponseDone(requestId, transformErrorForSerialization(err));
		});
	}

	//#region --- token counting

	$provideTokenLength(modelId: string, value: string, token: CancellationToken): Promise<number> {
		const knownModel = this._localModels.get(modelId);
		if (!knownModel) {
			return Promise.resolve(0);
		}
		const data = this._languageModelProviders.get(knownModel.metadata.vendor);
		if (!data) {
			return Promise.resolve(0);
		}
		return Promise.resolve(data.provider.provideTokenCount(knownModel.info, value, token));
	}
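
	/*
	 * Extension-side counterpart (a minimal sketch using the public `LanguageModelChat.countTokens` API,
	 * which routes through `_computeTokenLength`/`provideTokenCount` below):
	 *
	 *   const tokens = await model.countTokens('How many tokens is this sentence?');
	 *   if (tokens < model.maxInputTokens) {
	 *       // safe to include in the prompt
	 *   }
	 */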


	//#region --- making request

	async getDefaultLanguageModel(extension: IExtensionDescription, forceResolveModels?: boolean): Promise<vscode.LanguageModelChat | undefined> {
		let defaultModelId: string | undefined;

		if (forceResolveModels) {
			await this.selectLanguageModels(extension, {});
		}

		for (const [modelIdentifier, modelData] of this._localModels) {
			if (modelData.metadata.isDefaultForLocation[ChatAgentLocation.Chat]) {
				defaultModelId = modelIdentifier;
				break;
			}
		}
		if (!defaultModelId && !forceResolveModels) {
			// Maybe the default wasn't cached so we will try again with resolving the models too
			return this.getDefaultLanguageModel(extension, true);
		}
		return this.getLanguageModelByIdentifier(extension, defaultModelId);
	}

	async getLanguageModelByIdentifier(extension: IExtensionDescription, modelId: string | undefined): Promise<vscode.LanguageModelChat | undefined> {
		if (!modelId) {
			return undefined;
		}

		let model = this._localModels.get(modelId);
		if (!model) {
			// model gone? is this an error on us? Try to resolve model again
			this._logService.warn(`[LanguageModelProxy](${extension.identifier.value}) Could not find model '${modelId}' in local cache. Trying to resolve model again.`);
			const vendor = this.getVendorFromModelIdentifier(modelId);
			if (!vendor) {
				this._logService.warn(`[LanguageModelProxy](${extension.identifier.value}) Could not extract vendor from model identifier '${modelId}'.`);
				return undefined;
			}
			await this.selectLanguageModels(extension, { vendor });
			model = this._localModels.get(modelId);
			if (!model) {
				this._logService.warn(`[LanguageModelProxy](${extension.identifier.value}) Could not find model '${modelId}' in local cache after re-resolving models.`);
				return undefined;
			}
		}

		// make sure auth information is correct
		if (this._isUsingAuth(extension.identifier, model.metadata)) {
			await this._fakeAuthPopulate(model.metadata);
		}

		let apiObject: vscode.LanguageModelChat | undefined;
		if (!apiObject) {
			const that = this;
			apiObject = {
				id: model.info.id,
				vendor: model.metadata.vendor,
				family: model.info.family,
				version: model.info.version,
				name: model.info.name,
				capabilities: {
					supportsImageToText: model.metadata.capabilities?.vision ?? false,
					supportsToolCalling: !!model.metadata.capabilities?.toolCalling,
					editToolsHint: model.metadata.capabilities?.editTools,
				},
				maxInputTokens: model.metadata.maxInputTokens,
				countTokens(text, token) {
					if (!that._localModels.has(modelId)) {
						throw extHostTypes.LanguageModelError.NotFound(modelId);
					}
					return that._computeTokenLength(modelId, text, token ?? CancellationToken.None);
				},
				sendRequest(messages, options, token) {
					if (!that._localModels.has(modelId)) {
						throw extHostTypes.LanguageModelError.NotFound(modelId);
					}
					return that._sendChatRequest(extension, modelId, messages, options ?? {}, token ?? CancellationToken.None);
				}
			};

			Object.freeze(apiObject);
		}

		return apiObject;
	}

	async selectLanguageModels(extension: IExtensionDescription, selector: vscode.LanguageModelChatSelector) {

		// this triggers extension activation
		const models = await this._proxy.$selectChatModels({ ...selector, extension: extension.identifier });

		const result: vscode.LanguageModelChat[] = [];

		const modelPromises = models.map(identifier => this.getLanguageModelByIdentifier(extension, identifier));
		const modelResults = await Promise.all(modelPromises);
		for (const model of modelResults) {
			if (model) {
				result.push(model);
			}
		}

		return result;
	}
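
	/*
	 * Public-API usage sketch for model selection (assuming the stable `vscode.lm.selectChatModels`
	 * entry point that this method backs; vendor/family values are examples):
	 *
	 *   const [model] = await vscode.lm.selectChatModels({ vendor: 'copilot', family: 'gpt-4o' });
	 *   if (model) {
	 *       const response = await model.sendRequest(
	 *           [vscode.LanguageModelChatMessage.User('Write a haiku about TypeScript.')],
	 *           {},
	 *           token
	 *       );
	 *       for await (const chunk of response.text) {
	 *           console.log(chunk);
	 *       }
	 *   }
	 */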

	private async _sendChatRequest(extension: IExtensionDescription, languageModelId: string, messages: vscode.LanguageModelChatMessage2[], options: vscode.LanguageModelChatRequestOptions, token: CancellationToken) {

		const internalMessages: IChatMessage[] = this._convertMessages(extension, messages);

		const from = extension.identifier;
		const metadata = this._localModels.get(languageModelId)?.metadata;

		if (!metadata || !this._localModels.has(languageModelId)) {
			throw extHostTypes.LanguageModelError.NotFound(`Language model '${languageModelId}' is unknown.`);
		}

		if (this._isUsingAuth(from, metadata)) {
			const success = await this._getAuthAccess(extension, { identifier: metadata.extension, displayName: metadata.auth.providerLabel }, options.justification, false);

			if (!success || !this._modelAccessList.get(from)?.has(metadata.extension)) {
				throw extHostTypes.LanguageModelError.NoPermissions(`Language model '${languageModelId}' cannot be used by '${from.value}'.`);
			}
		}

		const requestId = (Math.random() * 1e6) | 0;
		const res = new LanguageModelResponse();
		this._pendingRequest.set(requestId, { languageModelId, res });

		try {
			await this._proxy.$tryStartChatRequest(from, languageModelId, requestId, new SerializableObjectWithBuffers(internalMessages), options, token);

		} catch (error) {
			// Erroring here means the request could NOT be started at all (e.g. wrong model, no access).
			// The response can still fail later; those failures are communicated via the stream object.
			this._pendingRequest.delete(requestId);
			throw extHostTypes.LanguageModelError.tryDeserialize(error) ?? error;
		}

		return res.apiObject;
	}
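
	// Response plumbing (for orientation): the `requestId` generated above is the correlation key. The
	// main thread streams chunks back through `$acceptResponsePart` and completes (or fails) the request
	// via `$acceptResponseDone`, both of which look up the same entry in `_pendingRequest`.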

	private _convertMessages(extension: IExtensionDescription, messages: vscode.LanguageModelChatMessage2[]) {
		const internalMessages: IChatMessage[] = [];
		for (const message of messages) {
			if (message.role as number === extHostTypes.LanguageModelChatMessageRole.System) {
				checkProposedApiEnabled(extension, 'languageModelSystem');
			}
			internalMessages.push(typeConvert.LanguageModelChatMessage2.from(message));
		}
		return internalMessages;
	}

	async $acceptResponsePart(requestId: number, chunk: SerializableObjectWithBuffers<IChatResponsePart | IChatResponsePart[]>): Promise<void> {
		const data = this._pendingRequest.get(requestId);
		if (data) {
			data.res.handleResponsePart(chunk.value);
		}
	}

	async $acceptResponseDone(requestId: number, error: SerializedError | undefined): Promise<void> {
		const data = this._pendingRequest.get(requestId);
		if (!data) {
			return;
		}
		this._pendingRequest.delete(requestId);
		if (error) {
			// we error the stream because that's the only way to signal
			// that the request has failed
			data.res.reject(extHostTypes.LanguageModelError.tryDeserialize(error) ?? transformErrorFromSerialization(error));
		} else {
			data.res.resolve();
		}
	}

	// BIG HACK: Using AuthenticationProviders to check access to Language Models
	private async _getAuthAccess(from: IExtensionDescription, to: { identifier: ExtensionIdentifier; displayName: string }, justification: string | undefined, silent: boolean | undefined): Promise<boolean> {
		// This needs to be done in both MainThread & ExtHost ChatProvider
		const providerId = INTERNAL_AUTH_PROVIDER_PREFIX + to.identifier.value;
		const session = await this._extHostAuthentication.getSession(from, providerId, [], { silent: true });

		if (session) {
			this.$updateModelAccesslist([{ from: from.identifier, to: to.identifier, enabled: true }]);
			return true;
		}

		if (silent) {
			return false;
		}

		try {
			const detail = justification
				? localize('chatAccessWithJustification', "Justification: {1}", to.displayName, justification)
				: undefined;
			await this._extHostAuthentication.getSession(from, providerId, [], { forceNewSession: { detail } });
			this.$updateModelAccesslist([{ from: from.identifier, to: to.identifier, enabled: true }]);
			return true;

		} catch (err) {
			// ignore
			return false;
		}
	}

	private _isUsingAuth(from: ExtensionIdentifier, toMetadata: ILanguageModelChatMetadata): toMetadata is ILanguageModelChatMetadata & { auth: NonNullable<ILanguageModelChatMetadata['auth']> } {
		// If the 'to' extension uses an auth check
		return !!toMetadata.auth
			// And we're asking from a different extension
			&& !ExtensionIdentifier.equals(toMetadata.extension, from);
	}

	private async _fakeAuthPopulate(metadata: ILanguageModelChatMetadata): Promise<void> {

		if (!metadata.auth) {
			return;
		}

		for (const from of this._languageAccessInformationExtensions) {
			try {
				await this._getAuthAccess(from, { identifier: metadata.extension, displayName: '' }, undefined, true);
			} catch (err) {
				this._logService.error('Fake Auth request failed');
				this._logService.error(err);
			}
		}
	}

	private async _computeTokenLength(modelId: string, value: string | vscode.LanguageModelChatMessage2, token: vscode.CancellationToken): Promise<number> {

		const data = this._localModels.get(modelId);
		if (!data) {
			throw extHostTypes.LanguageModelError.NotFound(`Language model '${modelId}' is unknown.`);
		}
		return this._languageModelProviders.get(data.metadata.vendor)?.provider.provideTokenCount(data.info, value, token) ?? 0;
		// return this._proxy.$countTokens(languageModelId, (typeof value === 'string' ? value : typeConvert.LanguageModelChatMessage2.from(value)), token);
	}

	$updateModelAccesslist(data: { from: ExtensionIdentifier; to: ExtensionIdentifier; enabled: boolean }[]): void {
		const updated = new Array<{ from: ExtensionIdentifier; to: ExtensionIdentifier }>();
		for (const { from, to, enabled } of data) {
			const set = this._modelAccessList.get(from) ?? new ExtensionIdentifierSet();
			const oldValue = set.has(to);
			if (oldValue !== enabled) {
				if (enabled) {
					set.add(to);
				} else {
					set.delete(to);
				}
				this._modelAccessList.set(from, set);
				const newItem = { from, to };
				updated.push(newItem);
				this._onDidChangeModelAccess.fire(newItem);
			}
		}
	}

	private readonly _languageAccessInformationExtensions = new Set<Readonly<IExtensionDescription>>();

	createLanguageModelAccessInformation(from: Readonly<IExtensionDescription>): vscode.LanguageModelAccessInformation {

		this._languageAccessInformationExtensions.add(from);

		// const that = this;
		const _onDidChangeAccess = Event.signal(Event.filter(this._onDidChangeModelAccess.event, e => ExtensionIdentifier.equals(e.from, from.identifier)));
		const _onDidAddRemove = Event.signal(this._onDidChangeProviders.event);

		return {
			get onDidChange() {
				return Event.any(_onDidChangeAccess, _onDidAddRemove);
			},
			canSendRequest(chat: vscode.LanguageModelChat): boolean | undefined {
				return true;
				// TODO @lramos15 - Fix

				// let metadata: ILanguageModelChatMetadata | undefined;

				// out: for (const [_, value] of that._allLanguageModelData) {
				// 	for (const candidate of value.apiObjects.values()) {
				// 		if (candidate === chat) {
				// 			metadata = value.metadata;
				// 			break out;
				// 		}
				// 	}
				// }
				// if (!metadata) {
				// 	return undefined;
				// }
				// if (!that._isUsingAuth(from.identifier, metadata)) {
				// 	return true;
				// }

				// const list = that._modelAccessList.get(from.identifier);
				// if (!list) {
				// 	return undefined;
				// }
				// return list.has(metadata.extension);
			}
		};
	}
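
	/*
	 * Extension-side view (a minimal sketch, assuming the public
	 * `ExtensionContext.languageModelAccessInformation` API that this factory backs):
	 *
	 *   const accessInfo = context.languageModelAccessInformation;
	 *   accessInfo.onDidChange(() => {
	 *       console.log('model access changed, can send?', accessInfo.canSendRequest(model));
	 *   });
	 *
	 * Note that `canSendRequest` currently always returns `true` here (see the TODO above).
	 */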

	fileIsIgnored(extension: IExtensionDescription, uri: vscode.Uri, token: vscode.CancellationToken = CancellationToken.None): Promise<boolean> {
		checkProposedApiEnabled(extension, 'chatParticipantAdditions');

		return this._proxy.$fileIsIgnored(uri, token);
	}

	get isModelProxyAvailable(): boolean {
		return !!this._languageModelProxyProvider;
	}

	async getModelProxy(extension: IExtensionDescription): Promise<vscode.LanguageModelProxy> {
		checkProposedApiEnabled(extension, 'languageModelProxy');

		if (!this._languageModelProxyProvider) {
			this._logService.trace('[LanguageModelProxy] No LanguageModelProxyProvider registered');
			throw new Error('No language model proxy provider is registered.');
		}

		const requestingExtensionId = ExtensionIdentifier.toKey(extension.identifier);
		try {
			const result = await Promise.resolve(this._languageModelProxyProvider.provideModelProxy(requestingExtensionId, CancellationToken.None));
			if (!result) {
				this._logService.warn(`[LanguageModelProxy] Provider returned no proxy for ${requestingExtensionId}`);
				throw new Error('Language model proxy is not available.');
			}
			return result;
		} catch (err) {
			this._logService.error(`[LanguageModelProxy] Provider failed to return proxy for ${requestingExtensionId}`, err);
			throw err;
		}
	}

	async $isFileIgnored(handle: number, uri: UriComponents, token: CancellationToken): Promise<boolean> {
		const provider = this._ignoredFileProviders.get(handle);
		if (!provider) {
			throw new Error('Unknown LanguageModelIgnoredFileProvider');
		}

		return (await provider.provideFileIgnored(URI.revive(uri), token)) ?? false;
	}

	registerIgnoredFileProvider(extension: IExtensionDescription, provider: vscode.LanguageModelIgnoredFileProvider): vscode.Disposable {
		checkProposedApiEnabled(extension, 'chatParticipantPrivate');

		const handle = ExtHostLanguageModels._idPool++;
		this._proxy.$registerFileIgnoreProvider(handle);
		this._ignoredFileProviders.set(handle, provider);
		return toDisposable(() => {
			this._proxy.$unregisterFileIgnoreProvider(handle);
			this._ignoredFileProviders.delete(handle);
		});
	}
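
	/*
	 * Provider sketch (illustrative; `provideFileIgnored` is the contract invoked by `$isFileIgnored`
	 * above, while the extension-facing registration call under the `chatParticipantPrivate` proposal
	 * is an assumption):
	 *
	 *   const ignoreProvider: vscode.LanguageModelIgnoredFileProvider = {
	 *       provideFileIgnored(uri, token) {
	 *           // e.g. keep anything under a `secrets/` folder out of language model requests
	 *           return uri.path.includes('/secrets/');
	 *       }
	 *   };
	 *   // assumed entry point: vscode.lm.registerIgnoredFileProvider(ignoreProvider);
	 */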

	registerLanguageModelProxyProvider(extension: IExtensionDescription, provider: vscode.LanguageModelProxyProvider): vscode.Disposable {
		checkProposedApiEnabled(extension, 'chatParticipantPrivate');

		this._languageModelProxyProvider = provider;
		this._onDidChangeModelProxyAvailability.fire();
		return toDisposable(() => {
			if (this._languageModelProxyProvider === provider) {
				this._languageModelProxyProvider = undefined;
				this._onDidChangeModelProxyAvailability.fire();
			}
		});
	}
}