Real-time collaboration for Jupyter Notebooks, Linux Terminals, LaTeX, VS Code, R IDE, and more,
all in one place.
Path: blob/master/src/packages/hub/webapp-configuration.ts
/*
 * This file is part of CoCalc: Copyright © 2020 Sagemath, Inc.
 * License: MS-RSL – see LICENSE.md for details
 */

// This unifies the entire webapp configuration – endpoint /customize
// The main goal is to optimize this, to use as little DB interactions
// as necessary, use caching, etc.
// This manages the webapp's configuration based on the hostname
// (allows whitelabeling).

import { delay } from "awaiting";
import debug from "debug";
import { isEmpty } from "lodash";
import LRU from "lru-cache";

import type { PostgreSQL } from "@cocalc/database/postgres/types";
import { get_passport_manager, PassportManager } from "@cocalc/server/hub/auth";
import { getSoftwareEnvironments } from "@cocalc/server/software-envs";
import { callback2 as cb2 } from "@cocalc/util/async-utils";
import { EXTRAS as SERVER_SETTINGS_EXTRAS } from "@cocalc/util/db-schema/site-settings-extras";
import { SoftwareEnvConfig } from "@cocalc/util/sanitize-software-envs";
import { site_settings_conf as SITE_SETTINGS_CONF } from "@cocalc/util/schema";
import { CustomLLMPublic } from "@cocalc/util/types/llm";
import { parseDomain, ParseResultType } from "parse-domain";
import getServerSettings, {
  ServerSettingsDynamic,
} from "./servers/server-settings";
import { have_active_registration_tokens } from "./utils";

const L = debug("hub:webapp-config");

const CACHE = new LRU({ max: 1000, ttl: 30 * 1000 }); // 30 seconds

export function clear_cache(): void {
  CACHE.clear();
}

type Theme = { [key: string]: string | boolean };

interface Config {
  // todo
  configuration: any;
  registration: any;
  strategies: object;
  software: SoftwareEnvConfig | null;
  ollama: { [key: string]: CustomLLMPublic };
  custom_openai: { [key: string]: CustomLLMPublic };
}

async function get_passport_manager_async(): Promise<PassportManager> {
  // the only issue here is, that the http server already starts up before the
  // passport manager is configured – but, the passport manager depends on the http server
  // we just retry during that initial period of uncertainty…
  let ms = 100;
  while (true) {
    const pp_manager = get_passport_manager();
    if (pp_manager != null) {
      return pp_manager;
    } else {
      L(
        `WARNING: Passport Manager not available yet -- trying again in ${ms}ms`,
      );
      await delay(ms);
      ms = Math.min(10000, 1.3 * ms);
    }
  }
}

export class WebappConfiguration {
  private readonly db: PostgreSQL;
  private data?: ServerSettingsDynamic;

  constructor({ db }) {
    this.db = db;
    this.init();
  }

  private async init(): Promise<void> {
    // this.data.pub updates automatically – do not modify it!
    this.data = await getServerSettings();
    await get_passport_manager_async();
  }

  // server settings with whitelabeling settings
  // TODO post-process all values
  public async settings(vID: string) {
    const res = await cb2(this.db._query, {
      query: "SELECT id, settings FROM whitelabeling",
      cache: true,
      where: { "id = $::TEXT": vID },
    });
    if (this.data == null) {
      // settings not yet initialized
      return {};
    }
    const data = res.rows[0];
    if (data != null) {
      return { ...this.data.all, ...data.settings };
    } else {
      return this.data.all;
    }
  }

  // derive the vanity ID from the host string
  private get_vanity_id(host: string): string | undefined {
    const host_parsed = parseDomain(host);
    if (host_parsed.type === ParseResultType.Listed) {
      // vanity for vanity.cocalc.com or foo.p for foo.p.cocalc.com
      return host_parsed.subDomains.join(".");
    }
    return undefined;
  }

  private async theme(vID: string): Promise<Theme> {
    const res = await cb2(this.db._query, {
      query: "SELECT id, theme FROM whitelabeling",
      cache: true,
      where: { "id = $::TEXT": vID },
    });
    const data = res.rows[0];
    if (data != null) {
      // post-process data, but do not set default values…
      const theme: Theme = {};
      for (const [key, value] of Object.entries(data.theme)) {
        const config = SITE_SETTINGS_CONF[key] ?? SERVER_SETTINGS_EXTRAS[key];
        if (typeof config?.to_val == "function") {
          theme[key] = config.to_val(value, data.theme);
        } else {
          if (typeof value == "string" || typeof value == "boolean") {
            theme[key] = value;
          }
        }
      }
      L(`vanity theme=${JSON.stringify(theme)}`);
      return theme;
    } else {
      L(`theme id=${vID} not found`);
      return {};
    }
  }

  private async get_vanity(vID): Promise<object> {
    if (vID != null && vID !== "") {
      L(`vanity ID = "${vID}"`);
      return await this.theme(vID);
    } else {
      return {};
    }
  }

  // returns the global configuration + eventually vanity specific site config settings
  private async get_configuration({ host, country }) {
    if (this.data == null) {
      // settings not yet initialized
      return {};
    }
    const vID = this.get_vanity_id(host);
    const config = this.data.pub;
    const vanity = await this.get_vanity(vID);
    return { ...config, ...vanity, ...{ country, dns: host } };
  }

  private async get_strategies(): Promise<object> {
    const key = "strategies";
    let strategies = CACHE.get(key);
    if (strategies == null) {
      // wait until this.passport_manager is initialized.
      // this could happen right at the start of the server
      const passport_manager = await get_passport_manager_async();
      strategies = passport_manager.get_strategies_v2();
      CACHE.set(key, strategies);
    }
    return strategies as object;
  }

  // derives the public ollama model configuration from the private one
  private get_ollama_public(): { [key: string]: CustomLLMPublic } {
    if (this.data == null) {
      throw new Error("server settings not yet initialized");
    }
    const ollama = this.data.all.ollama_configuration;
    return processCustomLLM(ollama, "Ollama");
  }

  private get_custom_openai_public(): { [key: string]: CustomLLMPublic } {
    if (this.data == null) {
      throw new Error("server settings not yet initialized");
    }
    const custom_openai = this.data.all.custom_openai_configuration;
    return processCustomLLM(custom_openai, "OpenAI (custom)");
  }

  private async get_config({ country, host }): Promise<Config> {
    while (this.data == null) {
      L("waiting for server settings to be initialized");
      await delay(100);
    }

    const [configuration, registration, software, ollama, custom_openai] =
      await Promise.all([
        this.get_configuration({ host, country }),
        have_active_registration_tokens(this.db),
        getSoftwareEnvironments("webapp"),
        this.get_ollama_public(),
        this.get_custom_openai_public(),
      ]);
    const strategies = await this.get_strategies();
    return {
      configuration,
      registration,
      strategies,
      software,
      ollama,
      custom_openai,
    };
  }

  // it returns a shallow copy, hence you can modify/add keys in the returned map!
  public async get({ country, host }): Promise<Config> {
    const key = `config::${country}::${host}`;
    let config = CACHE.get(key);
    if (config == null) {
      config = await this.get_config({ country, host });
      CACHE.set(key, config);
    } else {
      L(`cache hit -- '${key}'`);
    }
    return config as Config;
  }
}

// for Ollama or Custom OpenAI
function processCustomLLM(
  data: any,
  displayFallback,
): { [key: string]: CustomLLMPublic } {
  if (isEmpty(data)) return {};

  const ret: { [key: string]: CustomLLMPublic } = {};
  for (const key in data) {
    const conf = data[key];
    const cocalc = conf.cocalc ?? {};
    if (cocalc.disabled) continue;
    const model = conf.model ?? key;
    ret[key] = {
      model,
      display: cocalc.display ?? `${displayFallback} ${model}`,
      icon: cocalc.icon, // fallback is the Ollama or OpenAI icon, frontend does that
      desc: cocalc.desc ?? "",
    };
  }
  return ret;
}
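
To make the whitelabeling lookup concrete: get_vanity_id treats everything left of the registrable domain as the vanity ID. Below is a small standalone sketch of that derivation using the parse-domain API; the hostnames are illustrative only.

import { parseDomain, ParseResultType } from "parse-domain";

function vanityIdOf(host: string): string | undefined {
  const parsed = parseDomain(host);
  if (parsed.type === ParseResultType.Listed) {
    // subDomains is everything left of the registrable domain
    return parsed.subDomains.join(".");
  }
  return undefined; // e.g. "localhost" or a bare IP is not a listed domain
}

console.log(vanityIdOf("vanity.cocalc.com")); // "vanity"
console.log(vanityIdOf("foo.p.cocalc.com")); // "foo.p"
console.log(vanityIdOf("localhost")); // undefined

For "cocalc.com" itself the join yields an empty string, which get_vanity above treats the same as no vanity ID at all.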
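The class above backs the /customize endpoint mentioned in the header comment. The following is a minimal, hypothetical sketch of that wiring, assuming an Express app, a db() accessor from "@cocalc/database", and a CDN-injected "cf-ipcountry" header; the actual hub routing lives elsewhere and may differ.

import express from "express";
import { db } from "@cocalc/database"; // assumption: accessor returning the PostgreSQL handle
import { WebappConfiguration } from "./webapp-configuration";

const app = express();
const webappConfig = new WebappConfiguration({ db: db() });

app.get("/customize", async (req, res) => {
  // assumption: the visitor's country code arrives via a proxy/CDN header
  const country = (req.headers["cf-ipcountry"] as string) ?? "XX";
  const host = req.headers.host ?? "";
  // get() serves a cached Config per (country, host) pair, per the LRU ttl above
  res.json(await webappConfig.get({ country, host }));
});

app.listen(8080);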