GitHub Repository: sagemathinc/cocalc
Path: blob/master/src/packages/frontend/account/user-defined-llm.tsx
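/*
User-defined language models: account settings panel where users add, edit,
test, and delete their own LLM configurations (OpenAI-compatible, Ollama,
Anthropic, Mistral, Google, xAI). The list is stored as a JSON string in the
account's other_settings.
*/
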
import {
  Alert,
  Button,
  Flex,
  Form,
  Input,
  InputNumber,
  List,
  Modal,
  Popconfirm,
  Select,
  Skeleton,
  Space,
  Tooltip,
} from "antd";
import { useWatch } from "antd/es/form/Form";
import { sortBy } from "lodash";
import { FormattedMessage, useIntl } from "react-intl";

import { Panel } from "@cocalc/frontend/antd-bootstrap";
import {
  CSS,
  useEffect,
  useState,
  useTypedRedux,
} from "@cocalc/frontend/app-framework";
import {
  A,
  HelpIcon,
  Icon,
  RawPrompt,
  Text,
} from "@cocalc/frontend/components";
import { LanguageModelVendorAvatar } from "@cocalc/frontend/components/language-model-icon";
import { webapp_client } from "@cocalc/frontend/webapp-client";
import { OTHER_SETTINGS_USER_DEFINED_LLM as KEY } from "@cocalc/util/db-schema/defaults";
import {
  FALLBACK_MAX_TOKENS,
  LLM_PROVIDER,
  SERVICES,
  UserDefinedLLM,
  UserDefinedLLMService,
  isLLMServiceName,
  toUserLLMModelName,
} from "@cocalc/util/db-schema/llm-utils";
import { trunc, unreachable } from "@cocalc/util/misc";

// @cspell:ignore mixtral userdefined

interface Props {
  style?: CSS;
  on_change: (name: string, value: any) => void;
}

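/**
 * Panel for managing user-defined language models. Renders the list of
 * configured models, an edit dialog, and an "add" button; all changes are
 * persisted by serializing the entire list through on_change(KEY, ...).
 */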
export function UserDefinedLLMComponent({ style, on_change }: Props) {
  const intl = useIntl();
  const user_defined_llm = useTypedRedux("customize", "user_defined_llm");
  const other_settings = useTypedRedux("account", "other_settings");
  const [form] = Form.useForm();
  const [editLLM, setEditLLM] = useState<UserDefinedLLM | null>(null);
  const [tmpLLM, setTmpLLM] = useState<UserDefinedLLM | null>(null);
  const [loading, setLoading] = useState(false);
  const [llms, setLLMs] = useState<UserDefinedLLM[]>([]);
  const [error, setError] = useState<string | null>(null);

  const [needAPIKey, setNeedAPIKey] = useState(false);
  const [needEndpoint, setNeedEndpoint] = useState(false);

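  // "custom_openai" and "ollama" talk to a self-hosted endpoint and need no
  // API key; every other service requires an API key instead of an endpoint.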
  const service: UserDefinedLLMService = useWatch("service", form);
  useEffect(() => {
    const v = service === "custom_openai" || service === "ollama";
    setNeedAPIKey(!v);
    setNeedEndpoint(v);
  }, [service]);

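  // Parse the configured models from the JSON string stored in the account's
  // other_settings whenever that value changes.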
  useEffect(() => {
    setLoading(true);
    const val = other_settings?.get(KEY) ?? "[]";
    try {
      const data: UserDefinedLLM[] = JSON.parse(val);
      setLLMs(sortBy(data, "id"));
    } catch (e) {
      setError(`Error parsing custom LLMs: ${e}`);
      setLLMs([]);
    }
    setLoading(false);
  }, [other_settings?.get(KEY)]);

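  // Populate the form when a model is opened for editing; reset it when the
  // dialog closes.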
  useEffect(() => {
    if (editLLM != null) {
      form.setFieldsValue(editLLM);
    } else {
      form.resetFields();
    }
  }, [editLLM]);

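  // IDs are never reused: the next ID is one more than the current maximum.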
  function getNextID(): number {
    let id = 0;
    for (const m of llms) {
      if (m.id > id) id = m.id;
    }
    return id + 1;
  }

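  // Validate and persist "next", replacing any existing entry with id oldID.
  // On a validation error, the message tells the user how to recover their input.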
  function save(next: UserDefinedLLM, oldID: number) {
    // trim each field in next
    for (const key in next) {
      if (typeof next[key] === "string") {
        next[key] = next[key].trim();
      }
    }
    // set id if not set
    next.id ??= getNextID();

    const { service, display, model, endpoint } = next;
    if (
      !display ||
      !model ||
      (needEndpoint && !endpoint) ||
      (needAPIKey && !next.apiKey)
    ) {
      setError("Please fill in all fields. Click the add button to continue editing.");
      return;
    }
    if (!SERVICES.includes(service as any)) {
      setError(`Invalid service: ${service}`);
      return;
    }
    try {
      // replace an entry with the same ID, if it exists
      const newModels = llms.filter((m) => m.id !== oldID);
      newModels.push(next);
      on_change(KEY, JSON.stringify(newModels));
      setEditLLM(null);
    } catch (err) {
      setError(`Error saving custom LLM: ${err}`);
    }
  }

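  // Remove every entry with this model name from the stored list.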
  function deleteLLM(model: string) {
    try {
      const newModels = llms.filter((m) => m.model !== model);
      on_change(KEY, JSON.stringify(newModels));
    } catch (err) {
      setError(`Error deleting custom LLM: ${err}`);
    }
  }

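  // Button that opens the edit dialog with a fresh entry. If the previous
  // save failed validation, it restores the unsaved input from tmpLLM instead.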
  function addLLM() {
    return (
      <Button
        block
        icon={<Icon name="plus-circle-o" />}
        onClick={() => {
          if (!error) {
            setEditLLM({
              id: getNextID(),
              service: "custom_openai",
              display: "",
              endpoint: "",
              model: "",
              apiKey: "",
            });
          } else {
            setEditLLM(tmpLLM);
            setError(null);
          }
        }}
      >
        <FormattedMessage
          id="account.user-defined-llm.add_button.label"
          defaultMessage="Add your own Language Model"
        />
      </Button>
    );
  }

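  // Open a modal with a one-shot prompt so the user can verify that the
  // configured model actually responds.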
  async function test(llm: UserDefinedLLM) {
    setLoading(true);
    Modal.info({
      closable: true,
      title: `Test ${llm.display} (${llm.model})`,
      content: <TestCustomLLM llm={llm} />,
      okText: "Close",
    });
    setLoading(false);
  }

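  // List of all configured models with Edit, Delete, and Test actions.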
  function renderList() {
    return (
      <List
        loading={loading}
        itemLayout="horizontal"
        dataSource={llms}
        renderItem={(item: UserDefinedLLM) => {
          const { display, model, endpoint, service } = item;
          if (!isLLMServiceName(service)) return null;

          return (
            <List.Item
              actions={[
                <Button
                  icon={<Icon name="pen" />}
                  type="link"
                  onClick={() => {
                    setEditLLM(item);
                  }}
                >
                  Edit
                </Button>,
                <Popconfirm
                  title={`Are you sure you want to delete the LLM ${display} (${model})?`}
                  onConfirm={() => deleteLLM(model)}
                  okText="Yes"
                  cancelText="No"
                >
                  <Button icon={<Icon name="trash" />} type="link" danger>
                    Delete
                  </Button>
                </Popconfirm>,
                <Button
                  icon={<Icon name="play-circle" />}
                  type="link"
                  onClick={() => test(item)}
                >
                  Test
                </Button>,
              ]}
            >
              <Skeleton avatar title={false} loading={false} active>
                <Tooltip
                  title={
                    <>
                      Model: {model}
                      <br />
                      Endpoint: {endpoint}
                      <br />
                      Service: {service}
                    </>
                  }
                >
                  <List.Item.Meta
                    avatar={
                      <LanguageModelVendorAvatar
                        model={toUserLLMModelName(item)}
                      />
                    }
                    title={display}
                  />
                </Tooltip>
              </Skeleton>
            </List.Item>
          );
        }}
      />
    );
  }

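  // Example model name for the currently selected service, used in form help.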
  function renderExampleModel() {
    switch (service) {
      case "custom_openai":
      case "openai":
        return "'gpt-4o'";
      case "ollama":
        return "'llama3:latest', 'phi3:instruct', ...";
      case "anthropic":
        return "'claude-3-sonnet-20240229'";
      case "mistralai":
        return "'open-mixtral-8x22b'";
      case "google":
        return "'gemini-2.0-flash'";
      case "xai":
        return "'grok-4-1-fast-non-reasoning-16k'";
      default:
        unreachable(service);
        return "'llama3:latest'";
    }
  }

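  // Modal dialog for adding or editing a model. The current form values are
  // stashed in tmpLLM before saving, so the input is not lost if validation
  // fails and the dialog closes.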
  function renderForm() {
    if (!editLLM) return null;
    return (
      <Modal
        open={editLLM != null}
        title="Edit Language Model"
        onOk={() => {
          const vals = form.getFieldsValue(true);
          setTmpLLM(vals);
          save(vals, editLLM.id);
          setEditLLM(null);
        }}
        onCancel={() => {
          setEditLLM(null);
        }}
      >
        <Form
          form={form}
          layout="horizontal"
          labelCol={{ span: 8 }}
          wrapperCol={{ span: 16 }}
        >
          <Form.Item
            label="Display Name"
            name="display"
            rules={[{ required: true }]}
            help="e.g. 'MyLLM'"
          >
            <Input />
          </Form.Item>
          <Form.Item
            label="Service"
            name="service"
            rules={[{ required: true }]}
            help="Select the kind of server to talk to. Probably 'OpenAI API' or 'Ollama'."
          >
            <Select popupMatchSelectWidth={false}>
              {SERVICES.map((option) => {
                const { name, desc } = LLM_PROVIDER[option];
                return (
                  <Select.Option key={option} value={option}>
                    <Tooltip title={desc} placement="right">
                      <Text strong>{name}</Text>: {trunc(desc, 50)}
                    </Tooltip>
                  </Select.Option>
                );
              })}
            </Select>
          </Form.Item>
          <Form.Item
            label="Model Name"
            name="model"
            rules={[{ required: true }]}
            help={`This depends on the available models, e.g. ${renderExampleModel()}.`}
          >
            <Input />
          </Form.Item>
          <Form.Item
            label="Endpoint URL"
            name="endpoint"
            rules={[{ required: needEndpoint }]}
            help={
              needEndpoint
                ? "e.g. 'https://your.ollama.server:11434/' or 'https://api.openai.com/v1'"
                : "This setting is ignored."
            }
          >
            <Input disabled={!needEndpoint} />
          </Form.Item>
          <Form.Item
            label="API Key"
            name="apiKey"
            help="The secret key you got from the service provider."
            rules={[{ required: needAPIKey }]}
          >
            <Input />
          </Form.Item>
          <Form.Item
            label="Max Tokens"
            name="max_tokens"
            help={`Context window size in tokens. Leave empty to use the default (${FALLBACK_MAX_TOKENS}). Valid range: 1000-2000000.`}
            rules={[
              {
                type: "number",
                min: 1000,
                max: 2000000,
                message: "Must be between 1000 and 2000000",
              },
            ]}
          >
            <InputNumber
              min={1000}
              max={2000000}
              placeholder={`${FALLBACK_MAX_TOKENS} (default)`}
              style={{ width: "100%" }}
            />
          </Form.Item>
        </Form>
      </Modal>
    );
  }

  function renderError() {
    if (!error) return null;
    return <Alert message={error} type="error" closable />;
  }

  const title = intl.formatMessage({
    id: "account.user-defined-llm.title",
    defaultMessage: "Bring your own Language Model",
  });

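  // The whole feature can be disabled server-wide via the "user_defined_llm"
  // customization flag.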
  function renderContent() {
    if (user_defined_llm) {
      return (
        <>
          {renderForm()}
          {renderList()}
          {addLLM()}
          {renderError()}
        </>
      );
    } else {
      return <Alert banner type="info" message="This feature is disabled." />;
    }
  }

  function renderHelpIcon() {
    return (
      <HelpIcon style={{ float: "right" }} maxWidth="300px" title={title}>
        <FormattedMessage
          id="account.user-defined-llm.info"
          defaultMessage={`This allows you to call a {llm} of your own.
            You either need an API key, or you run the model on your own server.
            Make sure to click "Test" to check that the communication with the API actually works.
            Most likely, the type you are looking for is "Custom OpenAI" or "Ollama".`}
          values={{
            llm: (
              <A href={"https://en.wikipedia.org/wiki/Large_language_model"}>
                Large Language Model
              </A>
            ),
          }}
        />
      </HelpIcon>
    );
  }

  return (
    <Panel
      style={style}
      size={"small"}
      header={
        <>
          {title}
          {renderHelpIcon()}
        </>
      }
    >
      {renderContent()}
    </Panel>
  );
}

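/**
 * Minimal chat box for testing a user-defined model: sends a single prompt
 * via the streaming client and renders the streamed reply or an error.
 */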
function TestCustomLLM({ llm }: { llm: UserDefinedLLM }) {
  const [querying, setQuerying] = useState<boolean>(false);
  const [prompt, setPrompt] = useState<string>("Capital city of Australia?");
  const [reply, setReply] = useState<string>("");
  const [error, setError] = useState<string>("");

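  // Stream the reply token by token and accumulate it into the reply state;
  // an empty token signals the end of the stream.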
  async function doQuery() {
    setQuerying(true);
    setError("");
    setReply("");
    try {
      const llmStream = webapp_client.openai_client.queryStream({
        input: prompt,
        project_id: null,
        tag: "userdefined-llm-test",
        model: toUserLLMModelName(llm),
        system: "This is a test. Reply briefly.",
        maxTokens: 100,
      });

      let reply = "";
      llmStream.on("token", (token) => {
        if (token) {
          reply += token;
          setReply(reply);
        } else {
          setQuerying(false);
        }
      });

      llmStream.on("error", (err) => {
        setError(err?.toString() ?? "Unknown error");
        setQuerying(false);
      });
    } catch (e) {
      setError(e.message);
      setReply("");
      setQuerying(false);
    }
  }

  return (
    <Space direction="vertical">
      <Flex vertical={false} align="center" gap={5}>
        <Flex>Prompt: </Flex>
        <Input
          value={prompt}
          onChange={(e) => setPrompt(e.target.value)}
          onPressEnter={doQuery}
        />
        <Button loading={querying} type="primary" onClick={doQuery}>
          Test
        </Button>
      </Flex>
      {reply ? (
        <>
          Reply:
          <RawPrompt input={reply} />
        </>
      ) : null}
      {error ? <Alert banner message={error} type="error" /> : null}
    </Space>
  );
}