Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
microsoft
GitHub Repository: microsoft/vscode
Path: blob/main/extensions/copilot/src/extension/byok/vscode-node/abstractLanguageModelChatProvider.ts
13399 views
1
/*---------------------------------------------------------------------------------------------
2
* Copyright (c) Microsoft Corporation. All rights reserved.
3
* Licensed under the MIT License. See License.txt in the project root for license information.
4
*--------------------------------------------------------------------------------------------*/
5
6
import { CancellationToken, commands, LanguageModelChatInformation, LanguageModelChatMessage, LanguageModelChatMessage2, LanguageModelChatProvider, LanguageModelResponsePart2, PrepareLanguageModelChatModelOptions, Progress, ProvideLanguageModelChatResponseOptions } from 'vscode';
7
import { IConfigurationService } from '../../../platform/configuration/common/configurationService';
8
import { IChatModelInformation, ModelSupportedEndpoint } from '../../../platform/endpoint/common/endpointProvider';
9
import { ILogService } from '../../../platform/log/common/logService';
10
import { IFetcherService } from '../../../platform/networking/common/fetcherService';
11
import { IExperimentationService } from '../../../platform/telemetry/common/nullExperimentationService';
12
import { IStringDictionary } from '../../../util/vs/base/common/collections';
13
import { IInstantiationService } from '../../../util/vs/platform/instantiation/common/instantiation';
14
import { CopilotLanguageModelWrapper } from '../../conversation/vscode-node/languageModelAccess';
15
import { BYOKAuthType, BYOKKnownModels, byokKnownModelsToAPIInfo, BYOKModelCapabilities, resolveModelInfo } from '../common/byokProvider';
16
import { OpenAIEndpoint } from '../node/openAIEndpoint';
17
import { IBYOKStorageService } from './byokStorageService';
18
19
/**
 * Base shape of the per-provider-group configuration for a BYOK
 * (bring-your-own-key) language model provider.
 */
export interface LanguageModelChatConfiguration {
	/** API key used to authenticate against the provider, when one is configured. */
	readonly apiKey?: string;
}
22
23
/**
 * `LanguageModelChatInformation` extended with the provider-group
 * configuration that the model entry was resolved from.
 */
export interface ExtendedLanguageModelChatInformation<C extends LanguageModelChatConfiguration> extends LanguageModelChatInformation {
	/** Configuration of the provider group this model belongs to, if any. */
	readonly configuration?: C;
}
26
27
export abstract class AbstractLanguageModelChatProvider<C extends LanguageModelChatConfiguration = LanguageModelChatConfiguration, T extends ExtendedLanguageModelChatInformation<C> = ExtendedLanguageModelChatInformation<C>> implements LanguageModelChatProvider<T> {
28
29
constructor(
30
protected readonly _id: string,
31
protected readonly _name: string,
32
protected _knownModels: BYOKKnownModels | undefined,
33
protected readonly _byokStorageService: IBYOKStorageService,
34
@ILogService protected readonly _logService: ILogService,
35
) {
36
this.configureDefaultGroupWithApiKeyOnly();
37
}
38
39
// TODO: Remove this after 6 months
40
protected async configureDefaultGroupWithApiKeyOnly(): Promise<string | undefined> {
41
const apiKey = await this._byokStorageService.getAPIKey(this._name);
42
if (apiKey) {
43
this.configureDefaultGroupIfExists(this._name, { apiKey } as C);
44
await this._byokStorageService.deleteAPIKey(this._name, BYOKAuthType.GlobalApiKey);
45
}
46
return apiKey;
47
}
48
49
protected async configureDefaultGroupIfExists(name: string, configuration: C): Promise<void> {
50
await commands.executeCommand('lm.migrateLanguageModelsProviderGroup', { vendor: this._id, name, ...configuration });
51
}
52
53
async provideLanguageModelChatInformation({ silent, configuration }: PrepareLanguageModelChatModelOptions, token: CancellationToken): Promise<T[]> {
54
let apiKey: string | undefined = (configuration as C)?.apiKey;
55
if (!apiKey) {
56
apiKey = await this.configureDefaultGroupWithApiKeyOnly();
57
}
58
59
const models = await this.getAllModels(silent, apiKey, configuration as C);
60
return models.map(model => ({
61
...model,
62
apiKey,
63
configuration
64
}));
65
}
66
67
abstract provideLanguageModelChatResponse(model: T, messages: Array<LanguageModelChatMessage | LanguageModelChatMessage2>, options: ProvideLanguageModelChatResponseOptions, progress: Progress<LanguageModelResponsePart2>, token: CancellationToken): Promise<void>;
68
abstract provideTokenCount(model: T, text: string | LanguageModelChatMessage | LanguageModelChatMessage2, token: CancellationToken): Promise<number>;
69
protected abstract getAllModels(silent: boolean, apiKey: string | undefined, configuration: C | undefined): Promise<T[]>;
70
}
71
72
/**
 * Model information for providers that speak the OpenAI-compatible REST API,
 * carrying the base URL the model was discovered from.
 */
export interface OpenAICompatibleLanguageModelChatInformation<C extends LanguageModelChatConfiguration> extends ExtendedLanguageModelChatInformation<C> {
	/** Base URL of the OpenAI-compatible endpoint serving this model. */
	url: string;
}
75
76
export abstract class AbstractOpenAICompatibleLMProvider<T extends LanguageModelChatConfiguration = LanguageModelChatConfiguration> extends AbstractLanguageModelChatProvider<T, OpenAICompatibleLanguageModelChatInformation<T>> {
77
protected readonly _lmWrapper: CopilotLanguageModelWrapper;
78
79
constructor(
80
id: string,
81
name: string,
82
knownModels: BYOKKnownModels | undefined,
83
byokStorageService: IBYOKStorageService,
84
@IFetcherService protected readonly _fetcherService: IFetcherService,
85
logService: ILogService,
86
@IInstantiationService protected readonly _instantiationService: IInstantiationService,
87
@IConfigurationService protected readonly _configurationService: IConfigurationService,
88
@IExperimentationService protected readonly _expService: IExperimentationService
89
) {
90
super(id, name, knownModels, byokStorageService, logService);
91
this._lmWrapper = this._instantiationService.createInstance(CopilotLanguageModelWrapper);
92
}
93
94
async provideLanguageModelChatResponse(model: OpenAICompatibleLanguageModelChatInformation<T>, messages: Array<LanguageModelChatMessage | LanguageModelChatMessage2>, options: ProvideLanguageModelChatResponseOptions, progress: Progress<LanguageModelResponsePart2>, token: CancellationToken): Promise<void> {
95
const openAIChatEndpoint = await this.createOpenAIEndPoint(model);
96
return this._lmWrapper.provideLanguageModelResponse(openAIChatEndpoint, messages, options, options.requestInitiator, progress, token);
97
}
98
99
async provideTokenCount(model: OpenAICompatibleLanguageModelChatInformation<T>, text: string | LanguageModelChatMessage | LanguageModelChatMessage2, token: CancellationToken): Promise<number> {
100
const openAIChatEndpoint = await this.createOpenAIEndPoint(model);
101
return this._lmWrapper.provideTokenCount(openAIChatEndpoint, text);
102
}
103
104
protected async getAllModels(silent: boolean, apiKey: string | undefined, configuration: T | undefined): Promise<OpenAICompatibleLanguageModelChatInformation<T>[]> {
105
const modelsUrl = this.getModelsBaseUrl(configuration);
106
if (modelsUrl) {
107
const models = await this.getModelsFromEndpoint(modelsUrl, silent, apiKey);
108
return byokKnownModelsToAPIInfo(this._name, models).map(model => ({
109
...model,
110
url: modelsUrl
111
}));
112
}
113
return [];
114
}
115
116
private async getModelsFromEndpoint(endpoint: string, silent: boolean, apiKey: string | undefined): Promise<BYOKKnownModels> {
117
if (!apiKey && silent) {
118
return {};
119
}
120
121
try {
122
const headers: IStringDictionary<string> = {
123
'Content-Type': 'application/json',
124
'Authorization': `Bearer ${apiKey}`
125
};
126
127
const modelsEndpoint = this.getModelsDiscoveryUrl(endpoint);
128
const response = await this._fetcherService.fetch(modelsEndpoint, {
129
method: 'GET',
130
headers,
131
callSite: 'byok-models-discovery',
132
});
133
const data = await response.json();
134
const modelList: BYOKKnownModels = {};
135
136
const models = data.data ?? data.models;
137
if (!models || !Array.isArray(models)) {
138
throw new Error('Invalid response format');
139
}
140
141
for (const model of models) {
142
let modelCapabilities = this._knownModels?.[model.id];
143
if (!modelCapabilities) {
144
modelCapabilities = this.resolveModelCapabilities(model);
145
if (!modelCapabilities) {
146
continue;
147
}
148
if (!this._knownModels) {
149
this._knownModels = {};
150
}
151
this._knownModels[model.id] = modelCapabilities;
152
}
153
modelList[model.id] = modelCapabilities;
154
}
155
return modelList;
156
} catch (error) {
157
this._logService.error(error, `Error fetching available OpenRouter models`);
158
throw error;
159
}
160
}
161
162
protected async createOpenAIEndPoint(model: OpenAICompatibleLanguageModelChatInformation<T>): Promise<OpenAIEndpoint> {
163
const modelInfo = this.getModelInfo(model.id, model.url);
164
const url = modelInfo.supported_endpoints?.includes(ModelSupportedEndpoint.Responses) ?
165
`${model.url}/responses` :
166
`${model.url}/chat/completions`;
167
return this._instantiationService.createInstance(OpenAIEndpoint, modelInfo, model.configuration?.apiKey ?? '', url);
168
}
169
170
protected getModelInfo(modelId: string, modelUrl: string): IChatModelInformation {
171
return resolveModelInfo(modelId, this._name, this._knownModels);
172
}
173
174
protected resolveModelCapabilities(modelData: unknown): BYOKModelCapabilities | undefined {
175
return undefined;
176
}
177
178
protected abstract getModelsBaseUrl(configuration: T | undefined): string | undefined;
179
180
protected getModelsDiscoveryUrl(modelsBaseUrl: string): string {
181
return `${modelsBaseUrl}/models`;
182
}
183
184
}
185