GitHub Repository: sagemathinc/cocalc
Path: blob/master/src/packages/frontend/chat/llm-msg-summarize.tsx
/*
 * This file is part of CoCalc: Copyright © 2024 Sagemath, Inc.
 * License: MS-RSL – see LICENSE.md for details
 */

import { Button, Collapse, Switch } from "antd";

import { useLanguageModelSetting } from "@cocalc/frontend/account/useLanguageModelSetting";
import { useAsyncEffect, useState } from "@cocalc/frontend/app-framework";
import { Icon, Paragraph, RawPrompt } from "@cocalc/frontend/components";
import AIAvatar from "@cocalc/frontend/components/ai-avatar";
import PopconfirmKeyboard from "@cocalc/frontend/components/popconfirm-keyboard";
import LLMSelector, {
  modelToName,
} from "@cocalc/frontend/frame-editors/llm/llm-selector";
import { LLMCostEstimation } from "@cocalc/frontend/misc/llm-cost-estimation";
import { useProjectContext } from "@cocalc/frontend/project/context";
import { COLORS } from "@cocalc/util/theme";
import { ChatActions } from "./actions";
import { ChatMessageTyped } from "./types";

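/**
 * Button with a confirmation popover that asks a language model to summarize
 * the chat thread containing `message`. The popover lets the user pick the
 * model, choose between a short and a detailed summary, preview the exact
 * prompt that will be sent, and see an estimated cost before confirming.
 */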
export function SummarizeThread({
  message,
  actions,
}: {
  message: ChatMessageTyped;
  actions?: ChatActions;
}) {
  const reply_to = message.get("reply_to");
  const { project_id } = useProjectContext();
  const [model, setModel] = useLanguageModelSetting(project_id);
  const [visible, setVisible] = useState(false);
  const [tokens, setTokens] = useState(0);
  const [truncated, setTruncated] = useState(false);
  const [short, setShort] = useState(true);
  const [prompt, setPrompt] = useState<string>("");

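  // Dry run: while the popconfirm is open, call summarizeThread with
  // returnInfo: true to get the prompt that would be sent, its token count,
  // and whether it would be truncated, so the popover can display them.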
  useAsyncEffect(async () => {
    // we do not do all of this processing if the popconfirm is not visible
    if (!visible) return;

    const info = await actions?.summarizeThread({
      model,
      reply_to,
      returnInfo: true,
      short,
    });

    if (!info) return;
    const { tokens, truncated, prompt } = info;
    setTokens(tokens);
    setTruncated(truncated);
    setPrompt(prompt);
  }, [visible, model, message, short]);

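  // The popconfirm shows the model selector, the short/detailed toggle, a
  // truncation warning, a collapsible preview of the raw prompt, and a cost
  // estimate; confirming calls summarizeThread for real.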
  return (
    <PopconfirmKeyboard
      onVisibilityChange={setVisible}
      icon={<AIAvatar size={16} />}
      title={<>Summarize this thread</>}
      description={() => (
        <div style={{ maxWidth: "500px" }}>
          <Paragraph>
            <LLMSelector model={model} setModel={setModel} />
          </Paragraph>
          <Paragraph>
            The conversation in this thread will be sent to the language model{" "}
            {modelToName(model)}. It will then start a new thread and reply with
            a {short ? "short" : "detailed"} summary of the conversation.
          </Paragraph>
          <Paragraph>
            Summary length:{" "}
            <Switch
              checked={!short}
              onChange={(v) => setShort(!v)}
              unCheckedChildren={"short"}
              checkedChildren={"detailed"}
            />
          </Paragraph>
          {truncated ? (
            <Paragraph type="warning">
              The conversation will be truncated. Consider selecting another
              language model with a larger context window.
            </Paragraph>
          ) : null}
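          {/* Collapsible preview of the exact prompt, plus a cost estimate
              based on the token count obtained by the dry run above. */}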
          <Collapse
            items={[
              {
                key: "1",
                label: (
                  <>Click to see what will be sent to {modelToName(model)}.</>
                ),
                children: (
                  <RawPrompt
                    input={prompt}
                    style={{ border: "none", padding: "0", margin: "0" }}
                  />
                ),
              },
            ]}
          />
          <LLMCostEstimation
            model={model}
            tokens={tokens}
            paragraph={true}
            type="secondary"
            maxOutputTokens={short ? 200 : undefined}
          />
        </div>
      )}
      onConfirm={() => actions?.summarizeThread({ model, reply_to, short })}
      okText="Summarize"
    >
      <Button type="text" style={{ color: COLORS.GRAY_M }}>
        <Icon name="vertical-align-middle" /> Summarize…
      </Button>
    </PopconfirmKeyboard>
  );
}
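
/*
 * Usage sketch (illustrative only – `rootMessage` and `chatActions` are
 * placeholder names, not defined in this file):
 *
 *   <SummarizeThread message={rootMessage} actions={chatActions} />
 */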