Real-time collaboration for Jupyter Notebooks, Linux Terminals, LaTeX, VS Code, R IDE, and more,
all in one place.
Path: blob/master/src/packages/frontend/admin/llm/index.tsx
Views: 687
import { Button, Col, Input, Row, Select, Space, Switch } from "antd";12import {3CSS,4redux,5useState,6useTypedRedux,7} from "@cocalc/frontend/app-framework";8import { Paragraph, Title } from "@cocalc/frontend/components";9import { LLMModelName } from "@cocalc/frontend/components/llm-name";10import {11LLMServiceName,12LLM_PROVIDER,13LanguageModelCore,14USER_SELECTABLE_LLMS_BY_VENDOR,15isCoreLanguageModel,16toCustomOpenAIModel,17toOllamaModel,18} from "@cocalc/util/db-schema/llm-utils";19import { getRandomColor, trunc_middle } from "@cocalc/util/misc";20import { TestLLM } from "./test-component";21import { PROMPTS } from "./tests";22import { Value } from "./value";2324export function TestLLMAdmin() {25const customize = redux.getStore("customize");26const globallyEnabledLLMs = customize.getEnabledLLMs();27const selectableLLMs = useTypedRedux("customize", "selectable_llms");28const ollama = useTypedRedux("customize", "ollama");29const custom_openai = useTypedRedux("customize", "custom_openai");30const [test, setTest] = useState<number | null>(0);31// TODO: this is used to trigger sending queries – makes no sense that all of them disable it. fix this.32const [querying, setQuerying] = useState<boolean>();33const [all, setAll] = useState<boolean>(false);3435function llmStyle(llm: string): CSS {36return {37marginLeft: "5px",38marginBottom: "5px",39borderLeft: `5px solid ${getRandomColor(llm, {40min: 0,41max: 255,42diff: 100,43})}`,44};45}4647function renderStatus(llm: LanguageModelCore, vendor: LLMServiceName) {48const enabled = all || selectableLLMs.includes(llm);4950return (51<Row gutter={[10, 20]} style={llmStyle(llm)} key={`${vendor}-${llm}`}>52<Col md={24}>53<Space>54<Value val={enabled} /> <LLMModelName model={llm} />55</Space>56</Col>57<Col md={24}>58{enabled ? 
(59<TestLLM60test={test}61model={llm}62queryState={[querying, setQuerying]}63/>64) : undefined}65</Col>66</Row>67);68}6970function renderCustomOpenAI() {71return (72<Col key={"custom_openai"} md={12} xs={24}>73<Title level={5}>Custom OpenAI</Title>74{Object.entries(custom_openai?.toJS() ?? {}).map(([key, _val]) => {75const model = toCustomOpenAIModel(key);7677return (78<Row79gutter={[10, 20]}80style={llmStyle(model)}81key={`custom_openai-${key}`}82>83<Col md={24}>84<Space>85<Value val={true} /> <LLMModelName model={model} />86</Space>87</Col>88<Col md={24}>89<TestLLM90test={test}91model={model}92queryState={[querying, setQuerying]}93/>94</Col>95</Row>96);97})}98</Col>99);100}101102function renderOllama() {103return (104<Col key={"ollama"} md={12} xs={24}>105<Title level={5}>Ollama</Title>106{Object.entries(ollama?.toJS() ?? {}).map(([key, _val]) => {107const model = toOllamaModel(key);108109return (110<Row111gutter={[10, 20]}112style={llmStyle(model)}113key={`ollama-${key}`}114>115<Col md={24}>116<Space>117<Value val={true} /> <LLMModelName model={model} />118</Space>119</Col>120<Col md={24}>121<TestLLM122test={test}123model={model}124queryState={[querying, setQuerying]}125/>126</Col>127</Row>128);129})}130</Col>131);132}133134return (135<div>136<Paragraph>137Globally enabled LLMs (Admin Settings):138<Value val={globallyEnabledLLMs} />.139</Paragraph>140<Paragraph>141<Space>142<Input143value={test != null ? 
PROMPTS[test].prompt : ""}144disabled={true || querying}145onChange={(e) => setTest(parseInt(e.target.value))}146placeholder="Enter a query..."147addonAfter={148<Select149onSelect={setTest}150defaultValue={0}151popupMatchSelectWidth={false}152>153{PROMPTS.map((p, i) => (154<Select.Option key={i} value={i}>155{trunc_middle(p.prompt, 25)}156</Select.Option>157))}158</Select>159}160/>161<Button162type="primary"163onClick={() => setQuerying(true)}164disabled={test == null || querying}165>166Run Tests167</Button>168<Button onClick={() => setTest(null)}>Clear</Button>169<Switch onChange={(e) => setAll(e)} /> All170</Space>171</Paragraph>172<Paragraph>173<Row gutter={[10, 10]}>174{Object.entries(USER_SELECTABLE_LLMS_BY_VENDOR).map(175([vendor, llms]) =>176vendor !== "ollama" && vendor !== "custom_openai" ? (177<Col key={vendor} md={12} xs={24}>178<Title level={5}>{LLM_PROVIDER[vendor].name}</Title>179{llms180.filter(isCoreLanguageModel)181.map((llm) => renderStatus(llm, vendor as LLMServiceName))}182</Col>183) : undefined,184)}185{renderOllama()}186{renderCustomOpenAI()}187</Row>188</Paragraph>189190<Title level={5}>Ollama configuration</Title>191<Value val={ollama} />192<Title level={5}>Custom OpenAI API</Title>193<Value val={custom_openai} />194</div>195);196}197198199