Path: src/packages/frontend/codemirror/extensions/ai-formula.tsx
import { Button, Descriptions, Divider, Input, Modal, Space } from "antd";
import { debounce } from "lodash";
import { useIntl } from "react-intl";

import { useLanguageModelSetting } from "@cocalc/frontend/account/useLanguageModelSetting";
import {
  redux,
  useAsyncEffect,
  useEffect,
  useState,
  useTypedRedux,
} from "@cocalc/frontend/app-framework";
import { Localize, useLocalizationCtx } from "@cocalc/frontend/app/localize";
import type { Message } from "@cocalc/frontend/client/types";
import {
  HelpIcon,
  Icon,
  Markdown,
  Paragraph,
  Text,
  Title,
} from "@cocalc/frontend/components";
import AIAvatar from "@cocalc/frontend/components/ai-avatar";
import { LLMModelName } from "@cocalc/frontend/components/llm-name";
import LLMSelector from "@cocalc/frontend/frame-editors/llm/llm-selector";
import { dialogs } from "@cocalc/frontend/i18n";
import { show_react_modal } from "@cocalc/frontend/misc";
import { LLMCostEstimation } from "@cocalc/frontend/misc/llm-cost-estimation";
import track from "@cocalc/frontend/user-tracking";
import { webapp_client } from "@cocalc/frontend/webapp-client";
import { isFreeModel } from "@cocalc/util/db-schema/llm-utils";
import { Locale } from "@cocalc/util/i18n";
import { unreachable } from "@cocalc/util/misc";

type Mode = "tex" | "md";

const LLM_USAGE_TAG = `generate-formula`;

interface Opts {
  mode: Mode;
  text?: string;
  project_id: string;
  locale?: Locale;
}

export async function ai_gen_formula({
  mode,
  text = "",
  project_id,
  locale,
}: Opts): Promise<string> {
  return await show_react_modal((cb) => (
    <Localize>
      <AiGenFormula
        mode={mode}
        text={text}
        project_id={project_id}
        locale={locale}
        cb={cb}
      />
    </Localize>
  ));
}

interface Props extends Opts {
  cb: (err?: string, result?: string) => void;
}

function AiGenFormula({ mode, text = "", project_id, locale, cb }: Props) {
  const intl = useIntl();
  const { setLocale } = useLocalizationCtx();
  const is_cocalc_com = useTypedRedux("customize", "is_cocalc_com");
  const [model, setModel] = useLanguageModelSetting(project_id);
  const [input, setInput] = useState<string>(text);
  const [formula, setFormula] = useState<string>("");
  const [fullReply, setFullReply] = useState<string>("");
  const [generating, setGenerating] = useState<boolean>(false);
  const [error, setError] = useState<string | undefined>(undefined);
  const [tokens, setTokens] = useState<number>(0);

  useEffect(() => {
    if (typeof locale === "string") {
      setLocale(locale);
    }
  }, [locale]);

  useAsyncEffect(
    debounce(
      async () => {
        const { input, history, system } = getPrompt() ?? "";
        // compute the number of tokens (this MUST be a lazy import):
        const { getMaxTokens, numTokensUpperBound } = await import(
          "@cocalc/frontend/misc/llm"
        );

        const all = [
          input,
          history.map(({ content }) => content).join(" "),
          system,
        ].join(" ");
        setTokens(numTokensUpperBound(all, getMaxTokens(model)));
      },
      1000,
      { leading: true, trailing: true },
    ),

    [model, input],
  );

  const enabled = redux
    .getStore("projects")
    .hasLanguageModelEnabled(project_id, LLM_USAGE_TAG);

  function getSystemPrompt(): string {
    const p1 = `Typeset the plain-text description of a mathematical formula as a LaTeX formula. The formula will be`;
    const p2 = `Return only the LaTeX formula, ready to be inserted into the document. Do not add any explanations.`;
    switch (mode) {
      case "tex":
        return `${p1} in a *.tex file. Assume the package "amsmath" is available. ${p2}`;
      case "md":
        return `${p1} in a markdown file. Formulas are inside of $ or $$. ${p2}`;
      default:
        unreachable(mode);
        return p1;
    }
  }

  function getPrompt(): { input: string; history: Message[]; system: string } {
    const system = getSystemPrompt();
    // 3-shot examples
    const history: Message[] = [
      { role: "user", content: "equation e^(i pi) = -1" },
      { role: "assistant", content: "$$e^{i \\pi} = -1$$" },
      {
        role: "user",
        content: "integral 0 to 2 pi sin(x)^2",
      },
      {
        role: "assistant",
        content: "$\\int_{0}^{2\\pi} \\sin(x)^2 \\, \\mathrm{d}x$",
      },
      {
        role: "user",
        content: "equation system: [ 1 + x^2 = a, 1 - y^2 = ln(a) ]",
      },
      {
        role: "assistant",
        content:
          "\\begin{cases}\n1 + x^2 = a \\\\\n1 - y^2 = \\ln(a)\n\\end{cases}",
      },
    ];
    return { input: input || text, system, history };
  }

  function wrapFormula(tex: string = "") {
    // wrap single-line formulas in $...$
    // if it is multiline, wrap in \begin{equation}...\end{equation}
    // but only wrap if actually necessary
    tex = tex.trim();
    if (tex.split("\n").length > 1) {
      if (tex.includes("\\begin{")) {
        return tex;
      } else if (tex.startsWith("$$") && tex.endsWith("$$")) {
        return tex;
      } else {
        return `\\begin{equation}\n${tex}\n\\end{equation}`;
      }
    } else {
      if (tex.startsWith("$") && tex.endsWith("$")) {
        return tex;
      } else if (tex.startsWith("\\(") && tex.endsWith("\\)")) {
        return tex;
      } else {
        return `$${tex}$`;
      }
    }
  }

  function processFormula(formula: string): string {
    let tex = "";
    // iterate over all lines in formula: save everything between the first ``` and last ``` in tex
    let inCode = false;
    for (const line of formula.split("\n")) {
      if (line.startsWith("```")) {
        inCode = !inCode;
      } else if (inCode) {
        tex += line + "\n";
      }
    }
    // we found nothing -> the entire formula string is the tex code
    if (!tex) {
      tex = formula;
    }
    // if there are "\[" and "\]" in the formula, replace both by $$
    if (tex.includes("\\[") && tex.includes("\\]")) {
      tex = tex.replace(/\\\[|\\\]/g, "$$");
    }
    // similarly, replace "\(" and "\)" by single $ signs
    if (tex.includes("\\(") && tex.includes("\\)")) {
      tex = tex.replace(/\\\(|\\\)/g, "$");
    }
    // if there are at least two $$ or $ in the tex, we extract the part between the first and second $ or $$
    // This is necessary because, despite the prompt, some LLMs return stuff like: "Here is the LaTeX formula: $$ ... $$."
    for (const delimiter of ["$$", "$"]) {
      const parts = tex.split(delimiter);
      if (parts.length >= 3) {
        tex = parts[1];
        break;
      }
    }
    setFormula(tex);
    return tex;
  }

  async function doGenerate() {
    try {
      setError(undefined);
      setGenerating(true);
      setFormula("");
      setFullReply("");
      track("chatgpt", {
        project_id,
        tag: LLM_USAGE_TAG,
        mode,
        type: "generate",
        model,
      });
      const { system, input, history } = getPrompt();
      const reply = await webapp_client.openai_client.query({
        input,
        history,
        system,
        model,
        project_id,
        tag: LLM_USAGE_TAG,
      });
      const tex = processFormula(reply);
      // significant difference? Also show the full reply
      if (reply.length > 2 * tex.length) {
        setFullReply(reply);
      } else {
        setFullReply("");
      }
    } catch (err) {
      setError(err.message || err.toString());
    } finally {
      setGenerating(false);
    }
  }

  // Start the query immediately, if the user had selected some text … and it's a free model
  useEffect(() => {
    if (text && isFreeModel(model, is_cocalc_com)) {
      doGenerate();
    }
  }, [text]);

  function renderTitle() {
    return (
      <>
        <Title level={4}>
          <AIAvatar size={20} /> Generate LaTeX Formula
        </Title>
        {enabled ? (
          <>
            {intl.formatMessage(dialogs.select_llm)}:{" "}
            <LLMSelector
              project_id={project_id}
              model={model}
              setModel={setModel}
            />
          </>
        ) : undefined}
      </>
    );
  }

  function renderContent() {
    const help = (
      <HelpIcon title="Usage" extra="Help">
        <Paragraph>
          You can enter the description of your desired formula in various
          ways:
          <ul>
            <li>
              natural language: <Text code>drake equation</Text>,
            </li>
            <li>
              simple algebraic notation:{" "}
              <Text code>(a+b)^2 = a^2 + 2 a b + b^2</Text>,
            </li>
            <li>
              or a combination of both:{" "}
              <Text code>integral from 0 to infinity of (1+sin(x))/x^2 dx</Text>
              .
            </li>
          </ul>
        </Paragraph>
        <Paragraph>
          If the formula is not quite right, click "Generate" once again, try a
          different language model, or adjust the description. Of course, you
          can also edit it as usual after you have inserted it.
        </Paragraph>
        <Paragraph>
          Once you're happy, click the "Insert formula" button and the generated
          LaTeX formula will be inserted at the current cursor position. The
          "Insert full reply" button will, well, insert the entire answer.
        </Paragraph>
        <Paragraph>
          Prior to opening this dialog, you can even select a portion of your
          text. This will be used as your description and the AI language model
          will be queried immediately. Inserting the formula will then replace
          the selected text.
        </Paragraph>
      </HelpIcon>
    );
    return (
      <Space direction="vertical" size="middle" style={{ width: "100%" }}>
        <Paragraph style={{ marginBottom: 0 }}>
          The <LLMModelName model={model} size={18} /> language model will
          generate a LaTeX formula based on your description. {help}
        </Paragraph>
        <div style={{ textAlign: "right" }}>
          <LLMCostEstimation
            // limited to 200, since we only get a formula – which is not a lengthy text!
            maxOutputTokens={200}
            model={model}
            tokens={tokens}
            type="secondary"
          />
        </div>
        <Space.Compact style={{ width: "100%" }}>
          <Input
            allowClear
            disabled={generating}
            placeholder={
              "Describe the formula in natural language and/or algebraic notation."
            }
            defaultValue={text}
            onChange={(e) => setInput(e.target.value)}
            onPressEnter={doGenerate}
            addonBefore={<Icon name="fx" />}
          />
          <Button
            disabled={!input.trim() || generating}
            loading={generating}
            onClick={doGenerate}
            type={formula ? "default" : "primary"}
          >
            Generate
          </Button>
        </Space.Compact>
        {formula ? (
          <Descriptions
            size={"small"}
            column={1}
            bordered
            items={[
              {
                key: "1",
                label: "LaTeX",
                children: <Paragraph code>{formula}</Paragraph>,
              },
              {
                key: "2",
                label: "Preview",
                children: <Markdown value={wrapFormula(formula)} />,
              },
              ...(fullReply
                ? [
                    {
                      key: "3",
                      label: "Full reply",
                      children: <Markdown value={fullReply} />,
                    },
                  ]
                : []),
            ]}
          />
        ) : undefined}
        {error ? <Paragraph type="danger">{error}</Paragraph> : undefined}
        {mode === "tex" ? (
          <>
            <Divider />
            <Paragraph type="secondary">
              Note: You might have to ensure that{" "}
              <code>{"\\usepackage{amsmath}"}</code> is loaded in the preamble.
            </Paragraph>
          </>
        ) : undefined}
      </Space>
    );
  }

  function renderButtons() {
    return (
      <div>
        <Button onClick={onCancel}>Cancel</Button>
        <Button
          type={"default"}
          disabled={!fullReply}
          onClick={() => cb(undefined, `\n\n${fullReply}\n\n`)}
        >
          Insert full reply
        </Button>
        <Button
          type={formula ? "primary" : "default"}
          disabled={!formula}
          onClick={() => cb(undefined, wrapFormula(formula))}
        >
          Insert formula
        </Button>
      </div>
    );
  }

  function renderBody() {
    if (!enabled) {
      return <div>AI language models are disabled.</div>;
    }
    return renderContent();
  }

  function onCancel() {
    cb(undefined, text);
  }

  return (
    <Modal
      title={renderTitle()}
      open
      footer={renderButtons()}
      onCancel={onCancel}
      centered
      width={"70vw"}
    >
      {renderBody()}
    </Modal>
  );
}
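
// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original file): how a caller,
// for example a CodeMirror extension command, might open this dialog and
// insert the result. The `cm` editor object and its `getSelection` /
// `replaceSelection` methods are assumptions standing in for whatever editor
// API the caller actually has; only `ai_gen_formula` above is real.
//
//   const selection = cm.getSelection();
//   const latex = await ai_gen_formula({
//     mode: "tex",
//     text: selection, // optional: pre-fills the description; for free models
//                      // the query starts immediately (see the useEffect above)
//     project_id,
//   });
//   if (latex) {
//     cm.replaceSelection(latex); // hypothetical insertion at the cursor
//   }
// ---------------------------------------------------------------------------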