CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutSign UpSign In
sagemathinc

Real-time collaboration for Jupyter Notebooks, Linux Terminals, LaTeX, VS Code, R IDE, and more,
all in one place.

GitHub Repository: sagemathinc/cocalc
Path: blob/master/src/packages/frontend/admin/llm/test-component.tsx
Views: 687
1
import { Alert, Space } from "antd";
2
import { throttle } from "lodash";
3
4
import {
5
useAsyncEffect,
6
useEffect,
7
useState,
8
} from "@cocalc/frontend/app-framework";
9
import { Icon, Loading } from "@cocalc/frontend/components";
10
import { Markdown } from "@cocalc/frontend/markdown";
11
import { webapp_client } from "@cocalc/frontend/webapp-client";
12
import { LanguageModelCore } from "@cocalc/util/db-schema/llm-utils";
13
import { PROMPTS } from "./tests";
14
import { Value } from "./value";
15
16
interface TestLLMProps {
  // Model to test — either a known core LLM or an arbitrary model name string.
  model: LanguageModelCore | string;
  // Index into PROMPTS selecting which predefined test to run; null = no test selected.
  test: number | null;
  // Shared [value, setter] pair (useState-style) controlling whether a query is in flight;
  // owned by the parent so it can trigger/observe the run.
  queryState: [boolean | undefined, (val: boolean) => void];
}
21
22
export function TestLLM({ model, test, queryState }: TestLLMProps) {
23
const [querying, setQuerying] = queryState;
24
const [output, setOutput] = useState<string>("");
25
const [error, setError] = useState<string>("");
26
const [passed, setPassed] = useState<boolean | undefined>();
27
28
const {
29
prompt,
30
expected,
31
system = undefined,
32
history = undefined,
33
} = typeof test === "number" ? PROMPTS[test] : { prompt: "", expected: "" };
34
const expectedRegex = new RegExp(expected, "g");
35
36
const check = throttle(
37
() => {
38
if (passed != null && output.trim() === "") {
39
setPassed(undefined);
40
} else if (expectedRegex.test(output) && !passed) {
41
setPassed(true);
42
}
43
},
44
250,
45
{
46
leading: false,
47
trailing: true,
48
},
49
);
50
51
useEffect(() => {
52
if (prompt.trim() === "") {
53
setOutput("");
54
setError("");
55
setPassed(undefined);
56
}
57
}, [prompt, test]);
58
59
useEffect(() => {
60
check();
61
}, [output]);
62
63
useAsyncEffect(async () => {
64
if (!querying || prompt.trim() === "") {
65
querying && setQuerying(false);
66
setError("");
67
return;
68
}
69
70
try {
71
setPassed(undefined);
72
const llmStream = webapp_client.openai_client.queryStream({
73
input: prompt,
74
project_id: null,
75
tag: "admin-llm-test",
76
model,
77
system,
78
history,
79
maxTokens: 20,
80
});
81
82
let reply = "";
83
llmStream.on("token", (token) => {
84
if (token) {
85
reply += token;
86
setOutput(reply);
87
}
88
});
89
90
llmStream.on("error", (err) => {
91
setPassed(false);
92
setError(err?.toString());
93
setQuerying(false);
94
});
95
} catch (err) {
96
setError(err?.toString());
97
} finally {
98
setQuerying(false);
99
}
100
}, [querying]);
101
102
function renderPassed() {
103
if (typeof passed === "boolean") {
104
return <Value val={passed} />;
105
} else {
106
return <Icon unicode={0x2753} />;
107
}
108
}
109
110
if (querying) {
111
return <Loading />;
112
}
113
114
return (
115
<>
116
<Space direction="horizontal" align="start">
117
{renderPassed()} <Markdown value={output} />
118
</Space>
119
{error ? <Alert banner type="error" message={error} /> : undefined}
120
</>
121
);
122
}
123
124