Real-time collaboration for Jupyter Notebooks, Linux Terminals, LaTeX, VS Code, R IDE, and more,
all in one place.
Path: blob/master/src/packages/frontend/account/user-defined-llm.tsx
Views: 687
import {
  Alert,
  Button,
  Flex,
  Form,
  Input,
  List,
  Modal,
  Popconfirm,
  Select,
  Skeleton,
  Space,
  Tooltip,
} from "antd";
import { useWatch } from "antd/es/form/Form";
import { sortBy } from "lodash";
import { FormattedMessage, useIntl } from "react-intl";

import {
  useEffect,
  useState,
  useTypedRedux,
} from "@cocalc/frontend/app-framework";
import {
  A,
  HelpIcon,
  Icon,
  RawPrompt,
  Text,
  Title,
} from "@cocalc/frontend/components";
import { LanguageModelVendorAvatar } from "@cocalc/frontend/components/language-model-icon";
import { webapp_client } from "@cocalc/frontend/webapp-client";
import { OTHER_SETTINGS_USERDEFINED_LLM as KEY } from "@cocalc/util/db-schema/defaults";
import {
  LLM_PROVIDER,
  SERVICES,
  UserDefinedLLM,
  UserDefinedLLMService,
  isLLMServiceName,
  toUserLLMModelName,
} from "@cocalc/util/db-schema/llm-utils";
import { trunc, unreachable } from "@cocalc/util/misc";

interface Props {
  // Callback to persist an account setting: receives the settings key
  // (here always KEY = OTHER_SETTINGS_USERDEFINED_LLM) and the new value
  // (the JSON-serialized list of user-defined LLMs).
  on_change: (name: string, value: any) => void;
}

// Account settings panel for managing "bring your own" language models.
// The list of user-defined LLMs is stored as a JSON string under KEY in the
// account's other_settings; this component renders the list, an add/edit
// modal form, and a per-entry "Test" dialog that sends a small prompt to the
// configured backend. The whole feature is gated by the server-side
// "user_defined_llm" customize flag.
export function UserDefinedLLMComponent({ on_change }: Props) {
  const intl = useIntl();
  // server-side feature flag: when falsy, only a "disabled" banner is shown
  const user_defined_llm = useTypedRedux("customize", "user_defined_llm");
  const other_settings = useTypedRedux("account", "other_settings");
  const [form] = Form.useForm();
  // entry currently being edited in the modal (null = modal closed)
  const [editLLM, setEditLLM] = useState<UserDefinedLLM | null>(null);
  // last submitted form values, kept so a failed save can be reopened for fixing
  const [tmpLLM, setTmpLLM] = useState<UserDefinedLLM | null>(null);
  const [loading, setLoading] = useState(false);
  // parsed list of user-defined LLMs (sorted by id)
  const [llms, setLLMs] = useState<UserDefinedLLM[]>([]);
  const [error, setError] = useState<string | null>(null);

  // Which credentials the currently selected service requires (see effect below).
  const [needAPIKey, setNeedAPIKey] = useState(false);
  const [needEndpoint, setNeedEndpoint] = useState(false);

  // Track the "service" field of the form live, to toggle which inputs are required.
  const service: UserDefinedLLMService = useWatch("service", form);
  useEffect(() => {
    // custom_openai and ollama talk to a user-supplied endpoint and need no
    // API key; all other services need an API key but no endpoint.
    const v = service === "custom_openai" || service === "ollama";
    setNeedAPIKey(!v);
    setNeedEndpoint(v);
  }, [service]);

  // (Re)load the list of models whenever the stored JSON value changes.
  useEffect(() => {
    setLoading(true);
    const val = other_settings?.get(KEY) ?? "[]";
    try {
      const data: UserDefinedLLM[] = JSON.parse(val);
      setLLMs(sortBy(data, "id"));
    } catch (e) {
      setError(`Error parsing custom LLMs: ${e}`);
      setLLMs([]);
    }
    setLoading(false);
  }, [other_settings?.get(KEY)]);

  // Keep the modal form in sync with the entry being edited;
  // clear the form when the modal closes.
  useEffect(() => {
    if (editLLM != null) {
      form.setFieldsValue(editLLM);
    } else {
      form.resetFields();
    }
  }, [editLLM]);

  // Next free numeric id: one past the largest id currently in use.
  function getNextID(): number {
    let id = 0;
    llms.forEach((m) => (m.id > id ? (id = m.id) : null));
    return id + 1;
  }

  // Validate the submitted entry and persist the updated list, replacing any
  // existing entry with id === oldID. On validation failure only sets `error`
  // (the caller has already closed the modal; the user re-opens it via the
  // add button, which restores tmpLLM).
  function save(next: UserDefinedLLM, oldID: number) {
    // trim each field in next
    for (const key in next) {
      if (typeof next[key] === "string") {
        next[key] = next[key].trim();
      }
    }
    // set id if not set
    next.id ??= getNextID();

    const { service, display, model, endpoint } = next;
    // endpoint/apiKey are only mandatory for the service kinds that use them
    if (
      !display ||
      !model ||
      (needEndpoint && !endpoint) ||
      (needAPIKey && !next.apiKey)
    ) {
      setError("Please fill all fields – click the add button and fix it!");
      return;
    }
    if (!SERVICES.includes(service as any)) {
      setError(`Invalid service: ${service}`);
      return;
    }
    try {
      // replace an entry with the same ID, if it exists
      const newModels = llms.filter((m) => m.id !== oldID);
      newModels.push(next);
      on_change(KEY, JSON.stringify(newModels));
      setEditLLM(null);
    } catch (err) {
      setError(`Error saving custom LLM: ${err}`);
    }
  }

  // Remove every entry whose model name matches and persist the result.
  // NOTE(review): this filters by model string, not by id — if two entries
  // share the same model name, both are removed; verify this is intended.
  function deleteLLM(model: string) {
    try {
      const newModels = llms.filter((m) => m.model !== model);
      on_change(KEY, JSON.stringify(newModels));
    } catch (err) {
      setError(`Error deleting custom LLM: ${err}`);
    }
  }

  // Button that opens the edit modal with a fresh blank entry — or, after a
  // failed save, reopens it with the previously submitted values (tmpLLM).
  function addLLM() {
    return (
      <Button
        block
        icon={<Icon name="plus-circle-o" />}
        onClick={() => {
          if (!error) {
            setEditLLM({
              id: getNextID(),
              service: "custom_openai",
              display: "",
              endpoint: "",
              model: "",
              apiKey: "",
            });
          } else {
            // a previous save failed: restore the rejected values for fixing
            setEditLLM(tmpLLM);
            setError(null);
          }
        }}
      >
        <FormattedMessage
          id="account.user-defined-llm.add_button.label"
          defaultMessage="Add your own Language Model"
        />
      </Button>
    );
  }

  // Open an informational modal that runs a live round-trip test against
  // the given model (the actual query happens inside TestCustomLLM).
  async function test(llm: UserDefinedLLM) {
    setLoading(true);
    Modal.info({
      closable: true,
      title: `Test ${llm.display} (${llm.model})`,
      content: <TestCustomLLM llm={llm} />,
      okText: "Close",
    });
    setLoading(false);
  }

  // List of configured models with Edit / Delete / Test actions per entry.
  function renderList() {
    return (
      <List
        loading={loading}
        itemLayout="horizontal"
        dataSource={llms}
        renderItem={(item: UserDefinedLLM) => {
          const { display, model, endpoint, service } = item;
          // skip entries whose stored service is no longer a known service name
          if (!isLLMServiceName(service)) return null;

          return (
            <List.Item
              actions={[
                <Button
                  icon={<Icon name="pen" />}
                  type="link"
                  onClick={() => {
                    setEditLLM(item);
                  }}
                >
                  Edit
                </Button>,
                <Popconfirm
                  title={`Are you sure you want to delete the LLM ${display} (${model})?`}
                  onConfirm={() => deleteLLM(model)}
                  okText="Yes"
                  cancelText="No"
                >
                  <Button icon={<Icon name="trash" />} type="link" danger>
                    Delete
                  </Button>
                </Popconfirm>,
                <Button
                  icon={<Icon name="play-circle" />}
                  type="link"
                  onClick={() => test(item)}
                >
                  Test
                </Button>,
              ]}
            >
              <Skeleton avatar title={false} loading={false} active>
                <Tooltip
                  title={
                    <>
                      Model: {model}
                      <br />
                      Endpoint: {endpoint}
                      <br />
                      Service: {service}
                    </>
                  }
                >
                  <List.Item.Meta
                    avatar={
                      <LanguageModelVendorAvatar
                        model={toUserLLMModelName(item)}
                      />
                    }
                    title={display}
                  />
                </Tooltip>
              </Skeleton>
            </List.Item>
          );
        }}
      />
    );
  }

  // Example model-name string for the currently selected service,
  // used in the "Model Name" form field help text.
  function renderExampleModel() {
    switch (service) {
      case "custom_openai":
      case "openai":
        return "'gpt-4o'";
      case "ollama":
        return "'llama3:latest', 'phi3:instruct', ...";
      case "anthropic":
        return "'claude-3-sonnet-20240229'";
      case "mistralai":
        return "'open-mixtral-8x22b'";
      case "google":
        return "'gemini-1.5-flash'";
      default:
        // compile-time exhaustiveness check; fallback for unexpected values
        unreachable(service);
        return "'llama3:latest'";
    }
  }

  // Modal form for adding/editing one user-defined LLM entry.
  function renderForm() {
    if (!editLLM) return null;
    return (
      <Modal
        open={editLLM != null}
        title="Edit Language Model"
        onOk={() => {
          const vals = form.getFieldsValue(true);
          // keep a copy so a failed save() can be reopened via addLLM()
          setTmpLLM(vals);
          save(vals, editLLM.id);
          setEditLLM(null);
        }}
        onCancel={() => {
          setEditLLM(null);
        }}
      >
        <Form
          form={form}
          layout="horizontal"
          labelCol={{ span: 8 }}
          wrapperCol={{ span: 16 }}
        >
          <Form.Item
            label="Display Name"
            name="display"
            rules={[{ required: true }]}
            help="e.g. 'MyLLM'"
          >
            <Input />
          </Form.Item>
          <Form.Item
            label="Service"
            name="service"
            rules={[{ required: true }]}
            help="Select the kind of server to talk to. Probably 'OpenAI API' or 'Ollama'"
          >
            <Select popupMatchSelectWidth={false}>
              {SERVICES.map((option) => {
                const { name, desc } = LLM_PROVIDER[option];
                return (
                  <Select.Option key={option} value={option}>
                    <Tooltip title={desc} placement="right">
                      <Text strong>{name}</Text>: {trunc(desc, 50)}
                    </Tooltip>
                  </Select.Option>
                );
              })}
            </Select>
          </Form.Item>
          <Form.Item
            label="Model Name"
            name="model"
            rules={[{ required: true }]}
            help={`This depends on the available models. e.g. ${renderExampleModel()}.`}
          >
            <Input />
          </Form.Item>
          <Form.Item
            label="Endpoint URL"
            name="endpoint"
            rules={[{ required: needEndpoint }]}
            help={
              needEndpoint
                ? "e.g. 'https://your.ollama.server:11434/' or 'https://api.openai.com/v1'"
                : "This setting is ignored."
            }
          >
            {/* only editable for services that use a custom endpoint */}
            <Input disabled={!needEndpoint} />
          </Form.Item>
          <Form.Item
            label="API Key"
            name="apiKey"
            help="A secret string, which you got from the service provider."
            rules={[{ required: needAPIKey }]}
          >
            <Input />
          </Form.Item>
        </Form>
      </Modal>
    );
  }

  function renderError() {
    if (!error) return null;
    return <Alert message={error} type="error" closable />;
  }

  const title = intl.formatMessage({
    id: "account.user-defined-llm.title",
    defaultMessage: "Bring your own Language Model",
  });

  // Main body: form + list + add button + error, or a banner when the
  // feature is disabled server-side.
  function renderContent() {
    if (user_defined_llm) {
      return (
        <>
          {renderForm()}
          {renderList()}
          {addLLM()}
          {renderError()}
        </>
      );
    } else {
      return <Alert banner type="info" message="This feature is disabled." />;
    }
  }

  return (
    <>
      <Title level={5}>
        {title}{" "}
        <HelpIcon style={{ float: "right" }} maxWidth="300px" title={title}>
          <FormattedMessage
            id="account.user-defined-llm.info"
            defaultMessage={`This allows you to call a {llm} of your own.
                    You either need an API key or run it on your own server.
                    Make sure to click on "Test" to check, that the communication to the API actually works.
                    Most likely, the type you are looking for is "Custom OpenAI" or "Ollama".`}
            values={{
              llm: (
                <A href={"https://en.wikipedia.org/wiki/Large_language_model"}>
                  Large Language Model
                </A>
              ),
            }}
          />
        </HelpIcon>
      </Title>

      {renderContent()}
    </>
  );
}

// Small interactive tester shown inside the "Test" modal: sends a short
// prompt to the configured model via the streaming LLM client and renders
// the reply tokens as they arrive.
function TestCustomLLM({ llm }: { llm: UserDefinedLLM }) {
  const [querying, setQuerying] = useState<boolean>(false);
  const [prompt, setPrompt] = useState<string>("Capital of Australia?");
  const [reply, setReply] = useState<string>("");
  const [error, setError] = useState<string>("");

  // Fire one streaming query; tokens accumulate into `reply`,
  // a null/empty token signals end-of-stream.
  async function doQuery() {
    setQuerying(true);
    setError("");
    setReply("");
    try {
      const llmStream = webapp_client.openai_client.queryStream({
        input: prompt,
        project_id: null,
        tag: "userdefined-llm-test",
        model: toUserLLMModelName(llm),
        system: "This is a test. Reply briefly.",
        maxTokens: 100,
      });

      // accumulate in a local variable: state updates are async and
      // `reply` from the closure would be stale between tokens
      let reply = "";
      llmStream.on("token", (token) => {
        if (token) {
          reply += token;
          setReply(reply);
        } else {
          // falsy token marks the end of the stream
          setQuerying(false);
        }
      });

      llmStream.on("error", (err) => {
        setError(err?.toString());
        setQuerying(false);
      });
    } catch (e) {
      setError(e.message);
      setReply("");
      setQuerying(false);
    }
  }

  // TODO implement a button (or whatever) to query the backend and show the response in real time
  return (
    <Space direction="vertical">
      <Flex vertical={false} align="center" gap={5}>
        <Flex>Prompt: </Flex>
        <Input
          value={prompt}
          onChange={(e) => setPrompt(e.target.value)}
          onPressEnter={doQuery}
        />
        <Button loading={querying} type="primary" onClick={doQuery}>
          Test
        </Button>
      </Flex>
      {reply ? (
        <>
          Reply:
          <RawPrompt input={reply} />
        </>
      ) : null}
      {error ? <Alert banner message={error} type="error" /> : null}
    </Space>
  );
}