CoCalc Logo Icon
Store · Features · Docs · Share · Support · News · About · Sign Up · Sign In
sagemathinc

Real-time collaboration for Jupyter Notebooks, Linux Terminals, LaTeX, VS Code, R IDE, and more,
all in one place.

GitHub Repository: sagemathinc/cocalc
Path: blob/master/src/packages/frontend/admin/llm/index.tsx
Views: 687
1
import { Button, Col, Input, Row, Select, Space, Switch } from "antd";
2
3
import {
4
CSS,
5
redux,
6
useState,
7
useTypedRedux,
8
} from "@cocalc/frontend/app-framework";
9
import { Paragraph, Title } from "@cocalc/frontend/components";
10
import { LLMModelName } from "@cocalc/frontend/components/llm-name";
11
import {
12
LLMServiceName,
13
LLM_PROVIDER,
14
LanguageModelCore,
15
USER_SELECTABLE_LLMS_BY_VENDOR,
16
isCoreLanguageModel,
17
toCustomOpenAIModel,
18
toOllamaModel,
19
} from "@cocalc/util/db-schema/llm-utils";
20
import { getRandomColor, trunc_middle } from "@cocalc/util/misc";
21
import { TestLLM } from "./test-component";
22
import { PROMPTS } from "./tests";
23
import { Value } from "./value";
24
25
export function TestLLMAdmin() {
26
const customize = redux.getStore("customize");
27
const globallyEnabledLLMs = customize.getEnabledLLMs();
28
const selectableLLMs = useTypedRedux("customize", "selectable_llms");
29
const ollama = useTypedRedux("customize", "ollama");
30
const custom_openai = useTypedRedux("customize", "custom_openai");
31
const [test, setTest] = useState<number | null>(0);
32
// TODO: this is used to trigger sending queries – makes no sense that all of them disable it. fix this.
33
const [querying, setQuerying] = useState<boolean>();
34
const [all, setAll] = useState<boolean>(false);
35
36
function llmStyle(llm: string): CSS {
37
return {
38
marginLeft: "5px",
39
marginBottom: "5px",
40
borderLeft: `5px solid ${getRandomColor(llm, {
41
min: 0,
42
max: 255,
43
diff: 100,
44
})}`,
45
};
46
}
47
48
function renderStatus(llm: LanguageModelCore, vendor: LLMServiceName) {
49
const enabled = all || selectableLLMs.includes(llm);
50
51
return (
52
<Row gutter={[10, 20]} style={llmStyle(llm)} key={`${vendor}-${llm}`}>
53
<Col md={24}>
54
<Space>
55
<Value val={enabled} /> <LLMModelName model={llm} />
56
</Space>
57
</Col>
58
<Col md={24}>
59
{enabled ? (
60
<TestLLM
61
test={test}
62
model={llm}
63
queryState={[querying, setQuerying]}
64
/>
65
) : undefined}
66
</Col>
67
</Row>
68
);
69
}
70
71
function renderCustomOpenAI() {
72
return (
73
<Col key={"custom_openai"} md={12} xs={24}>
74
<Title level={5}>Custom OpenAI</Title>
75
{Object.entries(custom_openai?.toJS() ?? {}).map(([key, _val]) => {
76
const model = toCustomOpenAIModel(key);
77
78
return (
79
<Row
80
gutter={[10, 20]}
81
style={llmStyle(model)}
82
key={`custom_openai-${key}`}
83
>
84
<Col md={24}>
85
<Space>
86
<Value val={true} /> <LLMModelName model={model} />
87
</Space>
88
</Col>
89
<Col md={24}>
90
<TestLLM
91
test={test}
92
model={model}
93
queryState={[querying, setQuerying]}
94
/>
95
</Col>
96
</Row>
97
);
98
})}
99
</Col>
100
);
101
}
102
103
function renderOllama() {
104
return (
105
<Col key={"ollama"} md={12} xs={24}>
106
<Title level={5}>Ollama</Title>
107
{Object.entries(ollama?.toJS() ?? {}).map(([key, _val]) => {
108
const model = toOllamaModel(key);
109
110
return (
111
<Row
112
gutter={[10, 20]}
113
style={llmStyle(model)}
114
key={`ollama-${key}`}
115
>
116
<Col md={24}>
117
<Space>
118
<Value val={true} /> <LLMModelName model={model} />
119
</Space>
120
</Col>
121
<Col md={24}>
122
<TestLLM
123
test={test}
124
model={model}
125
queryState={[querying, setQuerying]}
126
/>
127
</Col>
128
</Row>
129
);
130
})}
131
</Col>
132
);
133
}
134
135
return (
136
<div>
137
<Paragraph>
138
Globally enabled LLMs (Admin Settings):
139
<Value val={globallyEnabledLLMs} />.
140
</Paragraph>
141
<Paragraph>
142
<Space>
143
<Input
144
value={test != null ? PROMPTS[test].prompt : ""}
145
disabled={true || querying}
146
onChange={(e) => setTest(parseInt(e.target.value))}
147
placeholder="Enter a query..."
148
addonAfter={
149
<Select
150
onSelect={setTest}
151
defaultValue={0}
152
popupMatchSelectWidth={false}
153
>
154
{PROMPTS.map((p, i) => (
155
<Select.Option key={i} value={i}>
156
{trunc_middle(p.prompt, 25)}
157
</Select.Option>
158
))}
159
</Select>
160
}
161
/>
162
<Button
163
type="primary"
164
onClick={() => setQuerying(true)}
165
disabled={test == null || querying}
166
>
167
Run Tests
168
</Button>
169
<Button onClick={() => setTest(null)}>Clear</Button>
170
<Switch onChange={(e) => setAll(e)} /> All
171
</Space>
172
</Paragraph>
173
<Paragraph>
174
<Row gutter={[10, 10]}>
175
{Object.entries(USER_SELECTABLE_LLMS_BY_VENDOR).map(
176
([vendor, llms]) =>
177
vendor !== "ollama" && vendor !== "custom_openai" ? (
178
<Col key={vendor} md={12} xs={24}>
179
<Title level={5}>{LLM_PROVIDER[vendor].name}</Title>
180
{llms
181
.filter(isCoreLanguageModel)
182
.map((llm) => renderStatus(llm, vendor as LLMServiceName))}
183
</Col>
184
) : undefined,
185
)}
186
{renderOllama()}
187
{renderCustomOpenAI()}
188
</Row>
189
</Paragraph>
190
191
<Title level={5}>Ollama configuration</Title>
192
<Value val={ollama} />
193
<Title level={5}>Custom OpenAI API</Title>
194
<Value val={custom_openai} />
195
</div>
196
);
197
}
198
199