GitHub Repository: sagemathinc/cocalc
Path: blob/master/src/packages/frontend/account/useLanguageModelSetting.tsx
import { redux, useMemo, useTypedRedux } from "@cocalc/frontend/app-framework";
import { getUserDefinedLLM } from "@cocalc/frontend/frame-editors/llm/use-userdefined-llm";
import {
  LLMServicesAvailable,
  LanguageService,
  fromCustomOpenAIModel,
  fromOllamaModel,
  getValidLanguageModelName,
  isCustomOpenAI,
  isOllamaLLM,
  isUserDefinedModel,
  unpackUserDefinedLLMModel,
} from "@cocalc/util/db-schema/llm-utils";

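// The key under which the chosen model is stored in the account's "other_settings".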
export const SETTINGS_LANGUAGE_MODEL_KEY = "language_model";

// ATTN: it is tempting to use the `useProjectContext` hook here, but that is not possible:
// the "AI Formula" dialog is rendered outside the project context (unfortunately).
export function useLanguageModelSetting(
  project_id?: string,
): [LanguageService, (llm: LanguageService) => void] {
  const other_settings = useTypedRedux("account", "other_settings");
  const default_llm = useTypedRedux("customize", "default_llm");
  const ollama = useTypedRedux("customize", "ollama");
  const custom_openai = useTypedRedux("customize", "custom_openai");
  const selectableLLMs = useTypedRedux("customize", "selectable_llms");

  const haveOpenAI = useTypedRedux("customize", "openai_enabled");
  const haveGoogle = useTypedRedux("customize", "google_vertexai_enabled");
  const haveOllama = useTypedRedux("customize", "ollama_enabled");
  const haveCustomOpenAI = useTypedRedux("customize", "custom_openai_enabled");
  const haveMistral = useTypedRedux("customize", "mistral_enabled");
  const haveAnthropic = useTypedRedux("customize", "anthropic_enabled");

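  // Which LLM services are enabled; the have* flags are listed as dependencies so this
  // recomputes whenever the site configuration toggles a service on or off.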
  const enabledLLMs: LLMServicesAvailable = useMemo(() => {
    const projectsStore = redux.getStore("projects");
    return projectsStore.whichLLMareEnabled(project_id);
  }, [
    haveOpenAI,
    haveGoogle,
    haveOllama,
    haveCustomOpenAI,
    haveMistral,
    haveAnthropic,
  ]);

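  // Resolve the stored preference (or the site default) to a model that is actually
  // valid given the enabled services and the list of selectable models.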
  const llm: LanguageService = useMemo(() => {
    return getValidLanguageModelName({
      model: other_settings?.get("language_model") ?? default_llm,
      filter: enabledLLMs,
      ollama: Object.keys(ollama?.toJS() ?? {}),
      custom_openai: Object.keys(custom_openai?.toJS() ?? {}),
      selectable_llms: selectableLLMs?.toJS() ?? [],
    });
  }, [other_settings, custom_openai, ollama, selectableLLMs, enabledLLMs]);

  function setLLM(llm: LanguageService) {
    setDefaultLLM(llm);
  }

  return [llm, setLLM];
}
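
// Minimal usage sketch (hypothetical, not part of this file):
//
//   const [model, setModel] = useLanguageModelSetting(project_id);
//   // render a picker bound to `model` and call `setModel(next)` when the user changes it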

// This changes the account's default LLM, but only if `llm` is a known model
// (site-selectable, a configured Ollama or custom OpenAI model, or user-defined);
// otherwise it only logs a warning.
export function setDefaultLLM(llm: LanguageService) {
  const customizeStore = redux.getStore("customize");
  const selectableLLMs = customizeStore.get("selectable_llms");
  const ollama = customizeStore.get("ollama");
  const custom_openai = customizeStore.get("custom_openai");

  if (selectableLLMs.includes(llm as any)) {
    redux
      .getActions("account")
      .set_other_settings(SETTINGS_LANGUAGE_MODEL_KEY, llm);
  } else if (isOllamaLLM(llm) && ollama?.get(fromOllamaModel(llm))) {
    // check if LLM is a key in the Ollama TypedMap
    redux
      .getActions("account")
      .set_other_settings(SETTINGS_LANGUAGE_MODEL_KEY, llm);
  } else if (
    isCustomOpenAI(llm) &&
    custom_openai?.get(fromCustomOpenAIModel(llm))
  ) {
    redux
      .getActions("account")
      .set_other_settings(SETTINGS_LANGUAGE_MODEL_KEY, llm);
  } else if (isUserDefinedModel(llm) && userDefinedLLMExists(llm)) {
    redux
      .getActions("account")
      .set_other_settings(SETTINGS_LANGUAGE_MODEL_KEY, llm);
  } else {
    console.warn(`setDefaultLLM: LLM "${llm}" is unknown.`);
  }
}

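// Check whether `model` refers to one of the user's own configured LLMs,
// matching on both service and model name.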
function userDefinedLLMExists(model: string): boolean {
  const user_llm = getUserDefinedLLM();
  const um = unpackUserDefinedLLMModel(model);
  if (um == null) return false;
  return user_llm.some((m) => m.service === um.service && m.model === um.model);
}