Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
sagemathinc
GitHub Repository: sagemathinc/cocalc
Path: blob/master/src/packages/frontend/codemirror/extensions/ai-formula.tsx
5949 views
1
/*
2
* This file is part of CoCalc: Copyright © 2025 Sagemath, Inc.
3
* License: MS-RSL – see LICENSE.md for details
4
*/
5
6
import { Button, Descriptions, Divider, Input, Modal, Space } from "antd";
7
import { debounce } from "lodash";
8
import { FormattedMessage, useIntl } from "react-intl";
9
10
import { useLanguageModelSetting } from "@cocalc/frontend/account/useLanguageModelSetting";
11
import {
12
redux,
13
useAsyncEffect,
14
useEffect,
15
useState,
16
useTypedRedux,
17
} from "@cocalc/frontend/app-framework";
18
import { Localize, useLocalizationCtx } from "@cocalc/frontend/app/localize";
19
import type { Message } from "@cocalc/frontend/client/types";
20
import {
21
HelpIcon,
22
Icon,
23
Markdown,
24
Paragraph,
25
Text,
26
Title,
27
} from "@cocalc/frontend/components";
28
import AIAvatar from "@cocalc/frontend/components/ai-avatar";
29
import { LLMModelName } from "@cocalc/frontend/components/llm-name";
30
import { useLLMHistory } from "@cocalc/frontend/frame-editors/llm/use-llm-history";
31
import { LLMHistorySelector } from "@cocalc/frontend/frame-editors/llm/llm-history-selector";
32
import LLMSelector from "@cocalc/frontend/frame-editors/llm/llm-selector";
33
import { dialogs, labels } from "@cocalc/frontend/i18n";
34
import { show_react_modal } from "@cocalc/frontend/misc";
35
import { LLMCostEstimation } from "@cocalc/frontend/misc/llm-cost-estimation";
36
import track from "@cocalc/frontend/user-tracking";
37
import { webapp_client } from "@cocalc/frontend/webapp-client";
38
import { isFreeModel } from "@cocalc/util/db-schema/llm-utils";
39
import { Locale } from "@cocalc/util/i18n";
40
import { unreachable } from "@cocalc/util/misc";
41
42
// Target markup flavor for the generated formula: a LaTeX document or markdown.
type Mode = "tex" | "md";

// Tag used for LLM usage tracking / feature gating of this dialog.
const LLM_USAGE_TAG = `generate-formula`;

// Options accepted by ai_gen_formula (and, via Props, by the dialog component).
interface Opts {
  mode: Mode; // "tex" when inserting into a *.tex file, "md" for markdown
  text?: string; // text selected by the user, used as the initial description
  project_id: string;
  locale?: Locale; // optional UI locale override for the dialog
}
52
53
/**
 * Open a modal dialog in which a language model generates a LaTeX formula
 * from a plain-text description.
 *
 * Resolves with the string to insert at the cursor (the wrapped formula, the
 * full LLM reply, or — on cancel — the originally selected text); rejects
 * with an error message if the dialog reports one via its callback.
 */
export async function ai_gen_formula({
  mode,
  text = "",
  project_id,
  locale,
}: Opts): Promise<string> {
  return await show_react_modal((cb) => (
    <Localize>
      <AiGenFormula
        mode={mode}
        text={text}
        project_id={project_id}
        locale={locale}
        cb={cb}
      />
    </Localize>
  ));
}
71
72
interface Props extends Opts {
  // Modal result callback: cb(err) on failure, cb(undefined, result) with
  // the text to insert on success or cancel.
  cb: (err?: string, result?: string) => void;
}
75
76
function AiGenFormula({ mode, text = "", project_id, locale, cb }: Props) {
77
const intl = useIntl();
78
const { setLocale } = useLocalizationCtx();
79
const is_cocalc_com = useTypedRedux("customize", "is_cocalc_com");
80
const [model, setModel] = useLanguageModelSetting(project_id);
81
const [input, setInput] = useState<string>(text);
82
const [formula, setFormula] = useState<string>("");
83
const [fullReply, setFullReply] = useState<string>("");
84
const [generating, setGenerating] = useState<boolean>(false);
85
const [error, setError] = useState<string | undefined>(undefined);
86
const [tokens, setTokens] = useState<number>(0);
87
const { prompts: historyPrompts, addPrompt } = useLLMHistory("formula");
88
89
useEffect(() => {
90
if (typeof locale === "string") {
91
setLocale(locale);
92
}
93
}, [locale]);
94
95
useAsyncEffect(
96
debounce(
97
async () => {
98
const { input, history, system } = getPrompt() ?? "";
99
// compute the number of tokens (this MUST be a lazy import):
100
const { getMaxTokens, numTokensEstimate } =
101
await import("@cocalc/frontend/misc/llm");
102
103
const all = [
104
input,
105
history.map(({ content }) => content).join(" "),
106
system,
107
].join(" ");
108
setTokens(numTokensEstimate(all, getMaxTokens(model)));
109
},
110
1000,
111
{ leading: true, trailing: true },
112
),
113
114
[model, input],
115
);
116
117
const enabled = redux
118
.getStore("projects")
119
.hasLanguageModelEnabled(project_id, LLM_USAGE_TAG);
120
121
function getSystemPrompt(): string {
122
const p1 = `Typeset the plain-text description of a mathematical formula as a LaTeX formula. The formula will be`;
123
const p2 = `Return only the LaTeX formula, ready to be inserted into the document. Do not add any explanations.`;
124
switch (mode) {
125
case "tex":
126
return `${p1} in a *.tex file. Assume the package "amsmath" is available. ${p2}`;
127
case "md":
128
return `${p1} in a markdown file. Formulas are inside of $ or $$. ${p2}`;
129
default:
130
unreachable(mode);
131
return p1;
132
}
133
}
134
135
function getPrompt(): { input: string; history: Message[]; system: string } {
136
const system = getSystemPrompt();
137
// 3-shot examples
138
const history: Message[] = [
139
{ role: "user", content: "equation e^(i pi) = -1" },
140
{ role: "assistant", content: "$$e^{i \\pi} = -1$$" },
141
{
142
role: "user",
143
content: "integral 0 to 2 pi sin(x)^2",
144
},
145
{
146
role: "assistant",
147
content: "$\\int_{0}^{2\\pi} \\sin(x)^2 \\, \\mathrm{d}x$",
148
},
149
{
150
role: "user",
151
content: "equation system: [ 1 + x^2 = a, 1 - y^2 = ln(a) ]",
152
},
153
{
154
role: "assistant",
155
content:
156
"\\begin{cases}\n1 + x^2 = a \\\n1 - y^2 = \\ln(a)\n\\end{cases}",
157
},
158
];
159
return { input: input || text, system, history };
160
}
161
162
function wrapFormula(tex: string = "") {
163
// wrap single-line formulas in $...$
164
// if it is multiline, wrap in \begin{equation}...\end{equation}
165
// but only wrap if actually necessary
166
tex = tex.trim();
167
if (tex.split("\n").length > 1) {
168
if (tex.includes("\\begin{")) {
169
return tex;
170
} else if (tex.startsWith("$$") && tex.endsWith("$$")) {
171
return tex;
172
} else {
173
return `\\begin{equation}\n${tex}\n\\end{equation}`;
174
}
175
} else {
176
if (tex.startsWith("$") && tex.endsWith("$")) {
177
return tex;
178
} else if (tex.startsWith("\\(") && tex.endsWith("\\)")) {
179
return tex;
180
} else {
181
return `$${tex}$`;
182
}
183
}
184
}
185
186
function processFormula(formula: string): string {
187
let tex = "";
188
// iterate over all lines in formula. save everything between the first ``` and last ``` in tex
189
let inCode = false;
190
for (const line of formula.split("\n")) {
191
if (line.startsWith("```")) {
192
inCode = !inCode;
193
} else if (inCode) {
194
tex += line + "\n";
195
}
196
}
197
// we found nothing -> the entire formula string is the tex code
198
if (!tex) {
199
tex = formula;
200
}
201
// if there are "\[" and "\]" in the formula, replace both by $$
202
if (tex.includes("\\[") && tex.includes("\\]")) {
203
tex = tex.replace(/\\\[|\\\]/g, "$$");
204
}
205
// similar, replace "\(" and "\)" by single $ signs
206
if (tex.includes("\\(") && tex.includes("\\)")) {
207
tex = tex.replace(/\\\(|\\\)/g, "$");
208
}
209
// if there are at least two $$ or $ in the tex, we extract the part between the first and second $ or $$
210
// This is necessary, because despite the prompt, some LLM return stuff like: "Here is the LaTeX formula: $$ ... $$."
211
for (const delimiter of ["$$", "$"]) {
212
const parts = tex.split(delimiter);
213
if (parts.length >= 3) {
214
tex = parts[1];
215
break;
216
}
217
}
218
setFormula(tex);
219
return tex;
220
}
221
222
async function doGenerate() {
223
try {
224
setError(undefined);
225
setGenerating(true);
226
setFormula("");
227
setFullReply("");
228
track("chatgpt", {
229
project_id,
230
tag: LLM_USAGE_TAG,
231
mode,
232
type: "generate",
233
model,
234
});
235
const { system, input, history } = getPrompt();
236
237
// Add prompt to history before generating
238
addPrompt(input);
239
240
const reply = await webapp_client.openai_client.query({
241
input,
242
history,
243
system,
244
model,
245
project_id,
246
tag: LLM_USAGE_TAG,
247
});
248
const tex = processFormula(reply);
249
// significant difference? Also show the full reply
250
if (reply.length > 2 * tex.length) {
251
setFullReply(reply);
252
} else {
253
setFullReply("");
254
}
255
} catch (err) {
256
setError(`${err}`);
257
} finally {
258
setGenerating(false);
259
}
260
}
261
262
// Start the query immediately, if the user had selected some text … and it's a free model
263
useEffect(() => {
264
if (text && isFreeModel(model, is_cocalc_com)) {
265
doGenerate();
266
}
267
}, [text]);
268
269
function renderTitle() {
270
return (
271
<>
272
<Title level={4}>
273
<AIAvatar size={20} />{" "}
274
<FormattedMessage
275
id="codemirror.extensions.ai_formula.title"
276
defaultMessage="Generate LaTeX Formula"
277
/>
278
</Title>
279
{enabled ? (
280
<>
281
{intl.formatMessage(dialogs.select_llm)}:{" "}
282
<LLMSelector
283
project_id={project_id}
284
model={model}
285
setModel={setModel}
286
/>
287
</>
288
) : undefined}
289
</>
290
);
291
}
292
293
function renderContent() {
294
const help = (
295
<HelpIcon title="Usage" extra="Help">
296
<FormattedMessage
297
id="codemirror.extensions.ai_formula.help"
298
defaultMessage={`
299
<p>You can enter the description of your desired formula in various ways:</p>
300
<ul>
301
<li>natural language: <code>drake equation</code>,</li>
302
<li>simple algebraic notation: <code>(a+b)^2 = a^2 + 2 a b + b^2</code>,</li>
303
<li>or a combination of both: <code>integral from 0 to infinity of (1+sin(x))/x^2 dx</code>.</li>
304
</ul>
305
<p>If the formula is not quite right, click "Generate" once again, try a different language model, or adjust the description.
306
Of course, you can also edit it as usual after you have inserted it.</p>
307
<p>Once you're happy, click the "Insert formula" button and the generated LaTeX formula will be inserted at the current cursor position.
308
The "Insert fully reply" button will, well, insert the entire answer.</p>
309
<p>Prior to opening this dialog, you can even select a portion of your text.
310
This will be used as your description and the AI language model will be queried immediately.
311
Inserting the formula will then replace the selected text.</p>`}
312
values={{
313
p: (children: any) => <Paragraph>{children}</Paragraph>,
314
ul: (children: any) => <ul>{children}</ul>,
315
li: (children: any) => <li>{children}</li>,
316
code: (children: any) => <Text code>{children}</Text>,
317
}}
318
/>
319
</HelpIcon>
320
);
321
return (
322
<Space direction="vertical" size="middle" style={{ width: "100%" }}>
323
<Paragraph style={{ marginBottom: 0 }}>
324
<FormattedMessage
325
id="codemirror.extensions.ai_formula.description"
326
defaultMessage="The {model} language model will generate a LaTeX formula based on your description. {help}"
327
values={{
328
model: <LLMModelName model={model} size={18} />,
329
help,
330
}}
331
/>
332
</Paragraph>
333
<div style={{ textAlign: "right" }}>
334
<LLMCostEstimation
335
// limited to 200, since we only get a formula – which is not a lengthy text!
336
maxOutputTokens={200}
337
model={model}
338
tokens={tokens}
339
type="secondary"
340
/>
341
</div>
342
<Space.Compact style={{ width: "100%" }}>
343
<Input
344
allowClear
345
disabled={generating}
346
placeholder={intl.formatMessage({
347
id: "codemirror.extensions.ai_formula.input_placeholder",
348
defaultMessage:
349
"Describe the formula in natural language and/or algebraic notation.",
350
})}
351
value={input}
352
onChange={(e) => setInput(e.target.value)}
353
onPressEnter={doGenerate}
354
addonBefore={<Icon name="fx" />}
355
/>
356
<LLMHistorySelector
357
prompts={historyPrompts}
358
onSelect={setInput}
359
disabled={generating}
360
/>
361
<Button
362
disabled={!input.trim() || generating}
363
loading={generating}
364
onClick={doGenerate}
365
type={formula ? "default" : "primary"}
366
>
367
{intl.formatMessage(labels.generate)}
368
</Button>
369
</Space.Compact>
370
{formula ? (
371
<Descriptions
372
size={"small"}
373
column={1}
374
bordered
375
items={[
376
{
377
key: "1",
378
label: "LaTeX",
379
children: <Paragraph code>{formula}</Paragraph>,
380
},
381
{
382
key: "2",
383
label: "Preview",
384
children: <Markdown value={wrapFormula(formula)} />,
385
},
386
...(fullReply
387
? [
388
{
389
key: "3",
390
label: "Full reply",
391
children: <Markdown value={fullReply} />,
392
},
393
]
394
: []),
395
]}
396
/>
397
) : undefined}
398
{error ? <Paragraph type="danger">{error}</Paragraph> : undefined}
399
{mode === "tex" ? (
400
<>
401
<Divider />
402
<Paragraph type="secondary">
403
<FormattedMessage
404
id="codemirror.extensions.ai_formula.amsmath_note"
405
defaultMessage="Note: You might have to ensure that <code>{amsmath_package}</code> is loaded in the preamble."
406
values={{
407
code: (children: any) => <code>{children}</code>,
408
amsmath_package: "\\usepackage{amsmath}",
409
}}
410
/>
411
</Paragraph>
412
</>
413
) : undefined}
414
</Space>
415
);
416
}
417
418
function renderButtons() {
419
return (
420
<Space.Compact>
421
<Button onClick={onCancel}>{intl.formatMessage(labels.cancel)}</Button>
422
<Button
423
type={"default"}
424
disabled={!fullReply}
425
onClick={() => cb(undefined, `\n\n${fullReply}\n\n`)}
426
>
427
{intl.formatMessage({
428
id: "codemirror.extensions.ai_formula.insert_full_reply_button",
429
defaultMessage: "Insert full reply",
430
})}
431
</Button>
432
<Button
433
type={formula ? "primary" : "default"}
434
disabled={!formula}
435
onClick={() => cb(undefined, wrapFormula(formula))}
436
>
437
{intl.formatMessage({
438
id: "codemirror.extensions.ai_formula.insert_formula_button",
439
defaultMessage: "Insert formula",
440
})}
441
</Button>
442
</Space.Compact>
443
);
444
}
445
446
function renderBody() {
447
if (!enabled) {
448
return (
449
<div>
450
<FormattedMessage
451
id="codemirror.extensions.ai_formula.disabled_message"
452
defaultMessage="AI language models are disabled."
453
/>
454
</div>
455
);
456
}
457
return renderContent();
458
}
459
460
function onCancel() {
461
cb(undefined, text);
462
}
463
464
return (
465
<Modal
466
title={renderTitle()}
467
open
468
footer={renderButtons()}
469
onCancel={onCancel}
470
width={{ xs: "90vw", sm: "90vw", md: "80vw", lg: "70vw", xl: "60vw" }}
471
>
472
{renderBody()}
473
</Modal>
474
);
475
}
476
477