
GitHub Repository: sagemathinc/cocalc
Path: blob/master/src/packages/next/pages/api/v2/llm/evaluate.ts
// This is the new endpoint for querying any LLM.
// Previously, this functionality lived in openai/chatgpt.

import { evaluate } from "@cocalc/server/llm/index";
import { analytics_cookie_name } from "@cocalc/util/misc";
import getAccountId from "lib/account/get-account";
import getParams from "lib/api/get-params";

// Next.js API route handler: run the LLM query and respond with JSON.
// On success the result is returned along with success: true; on failure
// the response contains only an error message.
export default async function handle(req, res) {
  try {
    const result = await doIt(req);
    res.json({ ...result, success: true });
  } catch (err) {
    res.json({ error: `${err.message}` });
    return;
  }
}

async function doIt(req) {
  // Parameters describing the LLM query: the prompt, an optional system
  // prompt, prior conversation history, the model to use, and an optional
  // tag for bookkeeping.
  const { input, system, history, model, tag } = getParams(req);
  // The account making the request, if authenticated.
  const account_id = await getAccountId(req);
  // Analytics cookie, used to associate the request with an anonymous visitor.
  const analytics_cookie = req.cookies[analytics_cookie_name];
  return {
    output: await evaluate({
      account_id,
      analytics_cookie,
      input,
      system,
      history,
      model,
      tag,
    }),
  };
}
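
Below is a minimal sketch of how a client might call this endpoint from the browser. It assumes, based on the file location under pages/api/v2/llm/evaluate.ts, that the route is exposed at /api/v2/llm/evaluate, that getParams reads a POSTed JSON body, and that authentication is carried by the session cookie already present in the browser; the model name is purely illustrative.

// Hypothetical client-side helper (not part of the file above).
async function askLLM(input: string): Promise<string> {
  const response = await fetch("/api/v2/llm/evaluate", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      input,
      system: "You are a helpful assistant.", // optional system prompt
      model: "gpt-3.5-turbo", // illustrative model name; actual values depend on server config
    }),
  });
  const data = await response.json();
  // The endpoint reports failures as { error } rather than a non-200 status.
  if (data.error) {
    throw new Error(data.error);
  }
  return data.output;
}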