Real-time collaboration for Jupyter Notebooks, Linux Terminals, LaTeX, VS Code, R IDE, and more,
all in one place.
Path: blob/master/src/packages/next/lib/api/post.ts
Views: 687
import basePath from "lib/base-path";1import LRU from "lru-cache";2import { join } from "path";34const VERSION = "v2";56export default async function apiPost(7path: string,8data?: object,9cache_s: number = 0 // if given, cache results for this many seconds to avoid overfetching10): Promise<any> {11let cache, key;12if (cache_s) {13cache = getCache(cache_s);14key = JSON.stringify({ path, data });15if (cache.has(key)) {16return cache.get(key);17}18}1920const response = await fetch(join(basePath, "api", VERSION, path), {21method: "POST",22headers: { "Content-Type": "application/json" },23body: JSON.stringify(data),24});25let result;26try {27result = await response.json();28if (result.error) {29// if error is set in response, then just throw exception (this greatly simplifies client code).30throw Error(result.error);31}32} catch (err) {33if (response.statusText == "Not Found") {34throw Error(`The API endpoint ${path} does not exist`);35}36throw err;37}38if (cache_s) {39cache.set(key, result);40}41return result;42}4344const caches: { [seconds: number]: LRU<string, object> } = {};4546function getCache(seconds: number) {47if (!caches[seconds]) {48caches[seconds] = new LRU<string, object>({49ttl: 1000 * seconds,50max: 200,51});52}53return caches[seconds];54}555657