Path: blob/master/src/packages/next/components/openai/vendor-status-check.tsx
import {
  LLMServiceName,
  getLLMServiceStatusCheckMD,
} from "@cocalc/util/db-schema/llm-utils";
import { unreachable } from "@cocalc/util/misc";
import A from "components/misc/A";

export function LLMServiceStatusCheck({
  service,
}: {
  service: LLMServiceName;
}): JSX.Element {
  switch (service) {
    case "openai":
      return (
        <>
          OpenAI <A href="https://status.openai.com/">status</A> and{" "}
          <A href="https://downdetector.com/status/openai/">downdetector</A>.
        </>
      );

    case "google":
      return (
        <>
          Google <A href="https://status.cloud.google.com">status</A> and{" "}
          <A href="https://downdetector.com/status/google-cloud">
            downdetector
          </A>
          .
        </>
      );

    case "ollama":
      return (
        <>
          This Ollama based API endpoint does not have a status page. If you are
          experiencing issues you have to check with the API service directly or
          try again later.
        </>
      );

    case "custom_openai":
      return (
        <>
          This Custom OpenAI API endpoint does not have a status page. If you
          are experiencing issues you have to check with the API service
          directly or try again later.
        </>
      );

    case "mistralai":
      return (
        <>
          This Mistral based API endpoint does not have a status page. If you
          are experiencing issues, use another model or try again later.
        </>
      );

    case "anthropic":
      return (
        <>
          Anthropic <A href="https://status.anthropic.com/">status</A>.
        </>
      );

    case "user":
      return <>{getLLMServiceStatusCheckMD("user")}</>;

    default:
      unreachable(service);
  }
  return <></>;
}
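
For reference, a minimal usage sketch, assuming a React rendering context and that the component is imported via the package's "components/..." path alias seen above. The wrapping TroubleshootingHint component is hypothetical and only illustrates passing one of the LLMServiceName values as the service prop.

import { LLMServiceStatusCheck } from "components/openai/vendor-status-check";

// Hypothetical wrapper, not part of the file above: it renders the status hint
// for a single vendor, e.g. "Anthropic status." linking to the Anthropic status page.
export function TroubleshootingHint(): JSX.Element {
  return (
    <div>
      If responses stop arriving, check the vendor first:{" "}
      <LLMServiceStatusCheck service="anthropic" />
    </div>
  );
}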