Book a Demo!
CoCalc Logo Icon
Store · Features · Docs · Share · Support · News · About · Policies · Sign Up · Sign In
microsoft
GitHub Repository: microsoft/vscode
Path: blob/main/extensions/copilot/src/platform/endpoint/test/node/mockEndpoint.ts
13405 views
1
/*---------------------------------------------------------------------------------------------
2
* Copyright (c) Microsoft Corporation. All rights reserved.
3
* Licensed under the MIT License. See License.txt in the project root for license information.
4
*--------------------------------------------------------------------------------------------*/
5
6
import { Raw } from '@vscode/prompt-tsx';
7
import { modelSupportsToolSearch } from '../../common/chatModelCapabilities';
8
import { ITokenizer, TokenizerType } from '../../../../util/common/tokenizer';
9
import { AsyncIterableObject } from '../../../../util/vs/base/common/async';
10
import { CancellationToken } from '../../../../util/vs/base/common/cancellation';
11
import { IChatMLFetcher, Source } from '../../../chat/common/chatMLFetcher';
12
import { ChatLocation, ChatResponse } from '../../../chat/common/commonTypes';
13
import { CHAT_MODEL } from '../../../configuration/common/configurationService';
14
import { ILogService } from '../../../log/common/logService';
15
import { FinishedCallback, OptionalChatRequestParams } from '../../../networking/common/fetch';
16
import { Response } from '../../../networking/common/fetcherService';
17
import { createCapiRequestBody, IChatEndpoint, ICreateEndpointBodyOptions, IEndpointBody, IMakeChatRequestOptions } from '../../../networking/common/networking';
18
import { ChatCompletion } from '../../../networking/common/openai';
19
import { ITelemetryService, TelemetryProperties } from '../../../telemetry/common/telemetry';
20
import { TelemetryData } from '../../../telemetry/common/telemetryData';
21
import { ITokenizerProvider } from '../../../tokenizer/node/tokenizer';
22
23
export class MockEndpoint implements IChatEndpoint {
24
constructor(
25
family: string | undefined,
26
@IChatMLFetcher private readonly _chatMLFetcher: IChatMLFetcher,
27
@ITokenizerProvider private readonly _tokenizerProvider: ITokenizerProvider,
28
) {
29
if (family !== undefined) {
30
this.family = family;
31
this.model = family;
32
this.supportsToolSearch = modelSupportsToolSearch(family);
33
}
34
}
35
36
isPremium: boolean = false;
37
multiplier: number = 0;
38
restrictedToSkus?: string[] | undefined;
39
40
maxOutputTokens: number = 50000;
41
model: string = CHAT_MODEL.GPT41;
42
modelProvider: string = 'Mock Endpoint';
43
supportsToolCalls: boolean = false;
44
supportsToolSearch?: boolean;
45
supportsVision: boolean = false;
46
supportsPrediction: boolean = true;
47
showInModelPicker: boolean = true;
48
isDefault: boolean = false;
49
isFallback: boolean = false;
50
policy: 'enabled' | { terms: string } = 'enabled';
51
urlOrRequestMetadata: string = 'https://microsoft.com';
52
modelMaxPromptTokens: number = 50000;
53
name: string = 'test';
54
family: string = 'test';
55
version: string = '1.0';
56
tokenizer: TokenizerType = TokenizerType.O200K;
57
58
processResponseFromChatEndpoint(telemetryService: ITelemetryService, logService: ILogService, response: Response, expectedNumChoices: number, finishCallback: FinishedCallback, telemetryData: TelemetryData, cancellationToken?: CancellationToken): Promise<AsyncIterableObject<ChatCompletion>> {
59
throw new Error('Method not implemented.');
60
}
61
62
acceptChatPolicy(): Promise<boolean> {
63
throw new Error('Method not implemented.');
64
}
65
66
makeChatRequest2(options: IMakeChatRequestOptions, token: CancellationToken): Promise<ChatResponse> {
67
return this._chatMLFetcher.fetchOne({
68
requestOptions: {},
69
...options,
70
endpoint: this,
71
}, token);
72
}
73
74
createRequestBody(options: ICreateEndpointBodyOptions): IEndpointBody {
75
return createCapiRequestBody(options, this.model);
76
}
77
78
public async makeChatRequest(
79
debugName: string,
80
messages: Raw.ChatMessage[],
81
finishedCb: FinishedCallback | undefined,
82
token: CancellationToken,
83
location: ChatLocation,
84
source?: Source,
85
requestOptions?: Omit<OptionalChatRequestParams, 'n'>,
86
userInitiatedRequest?: boolean,
87
telemetryProperties?: TelemetryProperties,
88
): Promise<ChatResponse> {
89
return this.makeChatRequest2({
90
debugName,
91
messages,
92
finishedCb,
93
location,
94
source,
95
requestOptions,
96
userInitiatedRequest,
97
telemetryProperties,
98
}, token);
99
}
100
101
cloneWithTokenOverride(modelMaxPromptTokens: number): IChatEndpoint {
102
throw new Error('Method not implemented.');
103
}
104
105
getExtraHeaders?(): Record<string, string> {
106
throw new Error('Method not implemented.');
107
}
108
109
interceptBody?(body: IEndpointBody | undefined): void {
110
throw new Error('Method not implemented.');
111
}
112
113
acquireTokenizer(): ITokenizer {
114
return this._tokenizerProvider.acquireTokenizer(this);
115
}
116
}
117
118