Path: src/packages/frontend/account/useLanguageModelSetting.tsx
import { redux, useMemo, useTypedRedux } from "@cocalc/frontend/app-framework";
import { getUserDefinedLLM } from "@cocalc/frontend/frame-editors/llm/use-userdefined-llm";
import {
  LLMServicesAvailable,
  LanguageService,
  fromCustomOpenAIModel,
  fromOllamaModel,
  getValidLanguageModelName,
  isCustomOpenAI,
  isOllamaLLM,
  isUserDefinedModel,
  unpackUserDefinedLLMModel,
} from "@cocalc/util/db-schema/llm-utils";

export const SETTINGS_LANGUAGE_MODEL_KEY = "language_model";

// ATTN: it is tempting to use the `useProjectContext` hook here, but it is not possible.
// The "AI Formula" dialog is outside the project context (unfortunately).
export function useLanguageModelSetting(
  project_id?: string,
): [LanguageService, (llm: LanguageService) => void] {
  const other_settings = useTypedRedux("account", "other_settings");
  const default_llm = useTypedRedux("customize", "default_llm");
  const ollama = useTypedRedux("customize", "ollama");
  const custom_openai = useTypedRedux("customize", "custom_openai");
  const selectableLLMs = useTypedRedux("customize", "selectable_llms");

  const haveOpenAI = useTypedRedux("customize", "openai_enabled");
  const haveGoogle = useTypedRedux("customize", "google_vertexai_enabled");
  const haveOllama = useTypedRedux("customize", "ollama_enabled");
  const haveCustomOpenAI = useTypedRedux("customize", "custom_openai_enabled");
  const haveMistral = useTypedRedux("customize", "mistral_enabled");
  const haveAnthropic = useTypedRedux("customize", "anthropic_enabled");

  const enabledLLMs: LLMServicesAvailable = useMemo(() => {
    const projectsStore = redux.getStore("projects");
    return projectsStore.whichLLMareEnabled(project_id);
  }, [
    haveOpenAI,
    haveGoogle,
    haveOllama,
    haveCustomOpenAI,
    haveMistral,
    haveAnthropic,
  ]);

  const llm: LanguageService = useMemo(() => {
    return getValidLanguageModelName({
      model: other_settings?.get("language_model") ?? default_llm,
      filter: enabledLLMs,
      ollama: Object.keys(ollama?.toJS() ?? {}),
      custom_openai: Object.keys(custom_openai?.toJS() ?? {}),
      selectable_llms: selectableLLMs?.toJS() ?? [],
    });
  }, [other_settings, custom_openai, ollama, selectableLLMs, enabledLLMs]);

  function setLLM(llm: LanguageService) {
    setDefaultLLM(llm);
  }

  return [llm, setLLM];
}

// This changes the account's default LLM
export function setDefaultLLM(llm: LanguageService) {
  const customizeStore = redux.getStore("customize");
  const selectableLLMs = customizeStore.get("selectable_llms");
  const ollama = customizeStore.get("ollama");
  const custom_openai = customizeStore.get("custom_openai");

  if (selectableLLMs.includes(llm as any)) {
    redux
      .getActions("account")
      .set_other_settings(SETTINGS_LANGUAGE_MODEL_KEY, llm);
  } else if (isOllamaLLM(llm) && ollama?.get(fromOllamaModel(llm))) {
    // check if LLM is a key in the Ollama TypedMap
    redux
      .getActions("account")
      .set_other_settings(SETTINGS_LANGUAGE_MODEL_KEY, llm);
  } else if (
    isCustomOpenAI(llm) &&
    custom_openai?.get(fromCustomOpenAIModel(llm))
  ) {
    redux
      .getActions("account")
      .set_other_settings(SETTINGS_LANGUAGE_MODEL_KEY, llm);
  } else if (isUserDefinedModel(llm) && userDefinedLLMExists(llm)) {
    redux
      .getActions("account")
      .set_other_settings(SETTINGS_LANGUAGE_MODEL_KEY, llm);
  } else {
    console.warn(`setDefaultLLM: LLM "${llm}" is unknown.`);
  }
}

function userDefinedLLMExists(model: string): boolean {
  const user_llm = getUserDefinedLLM();
  const um = unpackUserDefinedLLMModel(model);
  if (um == null) return false;
  return user_llm.some((m) => m.service === um.service && m.model === um.model);
}
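
// ---------------------------------------------------------------------------
// Usage sketch (not part of this file): a minimal, hypothetical component
// showing how the hook above could be consumed from a React component. The
// component name `LLMPicker` and the plain antd <Select> are illustrative
// assumptions, not actual CoCalc UI code.
//
//   import { Select } from "antd";
//   import { useLanguageModelSetting } from "@cocalc/frontend/account/useLanguageModelSetting";
//
//   export function LLMPicker({ project_id }: { project_id?: string }) {
//     // llm is the validated, currently selected model; setLLM persists the
//     // choice in the account's other_settings via setDefaultLLM.
//     const [llm, setLLM] = useLanguageModelSetting(project_id);
//     return (
//       <Select
//         value={llm}
//         onChange={setLLM}
//         options={[{ value: llm, label: llm }]}
//       />
//     );
//   }
// ---------------------------------------------------------------------------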