GitHub Repository: sagemathinc/cocalc
Path: blob/master/src/packages/next/pages/api/v2/llm/evaluate.ts
// This is the new endpoint for querying any LLM.
// Previously, this functionality lived in openai/chatgpt.

import type { Request, Response } from "express";

import { evaluate } from "@cocalc/server/llm/index";
import getAccountId from "lib/account/get-account";
import getParams from "lib/api/get-params";
import { getAnonymousID } from "lib/user-id";

export default async function handle(req: Request, res: Response) {
  try {
    const result = await doIt(req);
    res.json({ ...result, success: true });
  } catch (err) {
    // On failure, respond with the error message instead of throwing.
    res.json({ error: `${err.message}` });
    return;
  }
}

async function doIt(req: Request) {
  // Pull the prompt, optional system message, chat history, model name,
  // and tag out of the request parameters.
  const { input, system, history, model, tag } = getParams(req);
  // Determine the account id (if signed in) and the anonymous id for this request.
  const account_id = await getAccountId(req);
  const anonymous_id = await getAnonymousID(req);
  return {
    output: await evaluate({
      account_id,
      anonymous_id,
      input,
      system,
      history,
      model,
      tag,
    }),
  };
}
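For context, here is a minimal sketch of how a client might call this endpoint. The URL follows from the Next.js pages/api file path; the helper name, the example model name, and the exact shape of history are illustrative assumptions, not something defined in this file.

// Hypothetical client-side call to the endpoint above (assumptions noted in comments).
async function askLLM(input: string): Promise<string> {
  const response = await fetch("/api/v2/llm/evaluate", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      input,
      system: "You are a helpful assistant.", // optional system prompt
      history: [], // optional prior chat messages; shape depends on the server
      model: "gpt-4o-mini", // assumed model name; depends on server configuration
      tag: "example", // free-form tag identifying where the request came from
    }),
  });
  const data = await response.json();
  if (data.error) {
    throw new Error(data.error);
  }
  return data.output;
}

Note that authentication is not passed explicitly here: getAccountId and getAnonymousID on the server derive the caller's identity from the request itself (e.g. session cookies sent by the browser).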