Real-time collaboration for Jupyter Notebooks, Linux Terminals, LaTeX, VS Code, R IDE, and more,
all in one place.
Path: blob/master/src/packages/frontend/chat/llm-cost-estimation.tsx
import { Tooltip } from "antd";1import { useIntl } from "react-intl";23import { CSS } from "@cocalc/frontend/app-framework";4import { HelpIcon, Paragraph } from "@cocalc/frontend/components";5import {6ESTIMATION_HELP_TEXT,7MODEL_FREE_TO_USE,8} from "@cocalc/frontend/misc/llm-cost-estimation";910export function LLMCostEstimationChat({11costEstimate,12compact,13style,14}: {15costEstimate?: { min: number; max: number } | null;16compact: boolean; // only mean is shown17style?: CSS;18}) {19const intl = useIntl();2021if (!costEstimate) {22return null;23}2425const { min, max } = costEstimate;26const sum = min + max;27if (min == null || max == null || isNaN(sum)) return null;28const isFree = min === 0 && max === 0;29const range = (30<>31${min.toFixed(2)} - ${max.toFixed(2)}32</>33);34const cost = isFree ? (35<>Free</>36) : compact ? (37<Tooltip title={<>Estimated cost of calling the LLM: {range}</>}>38~${(sum / 2).toFixed(2)}39</Tooltip>40) : (41<>{range}</>42);4344return (45<Paragraph46type="secondary"47style={{48whiteSpace: "nowrap",49...style,50}}51>52{cost}{" "}53<HelpIcon title={"LLM Cost Estimation"} placement={"topLeft"}>54<Paragraph>55This chat message mentions a language model or replies in a thread.56This means, right after sending the message, the message and the57content of the current thread will be sent to the LLM for processing.58Then, the LLM will start replying to your message.59</Paragraph>60<Paragraph>61{isFree ? (62<>{intl.formatMessage(MODEL_FREE_TO_USE)}</>63) : (64<>65The estimate for this call is between ${min.toFixed(2)} and $66{max.toFixed(2)}.67</>68)}69</Paragraph>70{ESTIMATION_HELP_TEXT}71</HelpIcon>72</Paragraph>73);74}757677
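
For context: the component renders nothing without a costEstimate, shows "Free" when both bounds are zero, shows the midpoint (min + max) / 2 with the full range in a tooltip when compact is set, and shows the range itself otherwise. Below is a minimal usage sketch, not part of this file: the import path assumes the @cocalc/frontend alias visible in the imports above, and the ChatFooter wrapper and the estimate values are hypothetical stand-ins for numbers derived from token counts.

// Hypothetical usage sketch: ChatFooter and the estimate values are
// invented for illustration; only LLMCostEstimationChat comes from the
// file above (assuming the @cocalc/frontend path alias resolves to it).
import { LLMCostEstimationChat } from "@cocalc/frontend/chat/llm-cost-estimation";

export function ChatFooter() {
  return (
    <LLMCostEstimationChat
      // Estimated dollar range for the pending LLM call,
      // e.g. derived from input/output token counts.
      costEstimate={{ min: 0.01, max: 0.05 }}
      // compact: render only ~$0.03 (the midpoint);
      // the full $0.01 - $0.05 range moves into a tooltip.
      compact={true}
      style={{ marginLeft: "8px" }}
    />
  );
}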