Path: blob/main/AUTOMATIC1111_files/blocks.py
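For orientation before the module source below: a minimal usage sketch of the API this file implements, adapted from the `Blocks` class docstring further down (assumes `gradio` is installed and importable).

# Minimal sketch, taken from the Blocks docstring in this module.
import gradio as gr

def update(name):
    return f"Welcome to Gradio, {name}!"

with gr.Blocks() as demo:
    gr.Markdown("Start typing below and then click **Run** to see the output.")
    with gr.Row():
        inp = gr.Textbox(placeholder="What is your name?")
        out = gr.Textbox()
    btn = gr.Button("Run")
    btn.click(fn=update, inputs=inp, outputs=out)

demo.launch()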
from __future__ import annotations

import copy
import inspect
import json
import os
import random
import secrets
import sys
import threading
import time
import warnings
import webbrowser
from abc import abstractmethod
from collections import defaultdict
from pathlib import Path
from types import ModuleType
from typing import TYPE_CHECKING, Any, AsyncIterator, Callable, Literal, cast

import anyio
import requests
from anyio import CapacityLimiter
from gradio_client import serializing
from gradio_client import utils as client_utils
from gradio_client.documentation import document, set_documentation_group
from packaging import version

from gradio import (
    analytics,
    components,
    external,
    networking,
    queueing,
    routes,
    strings,
    themes,
    utils,
    wasm_utils,
)
from gradio.context import Context
from gradio.deprecation import check_deprecated_parameters, warn_deprecation
from gradio.exceptions import (
    DuplicateBlockError,
    InvalidApiNameError,
    InvalidBlockError,
)
from gradio.helpers import EventData, create_tracker, skip, special_args
from gradio.themes import Default as DefaultTheme
from gradio.themes import ThemeClass as Theme
from gradio.tunneling import (
    BINARY_FILENAME,
    BINARY_FOLDER,
    BINARY_PATH,
    BINARY_URL,
    CURRENT_TUNNELS,
)
from gradio.utils import (
    GRADIO_VERSION,
    TupleNoPrint,
    check_function_inputs_match,
    component_or_layout_class,
    concurrency_count_warning,
    delete_none,
    get_cancel_function,
    get_continuous_fn,
)

try:
    import spaces  # type: ignore
except Exception:
    spaces = None

set_documentation_group("blocks")

if TYPE_CHECKING:  # Only import for type checking (is False at runtime).
    from fastapi.applications import FastAPI

    from gradio.components import Component

BUILT_IN_THEMES: dict[str, Theme] = {
    t.name: t
    for t in [
        themes.Base(),
        themes.Default(),
        themes.Monochrome(),
        themes.Soft(),
        themes.Glass(),
    ]
}


class Block:
    def __init__(
        self,
        *,
        render: bool = True,
        elem_id: str | None = None,
        elem_classes: list[str] | str | None = None,
        visible: bool = True,
        root_url: str | None = None,  # URL that is prepended to all file paths
        _skip_init_processing: bool = False,  # Used for loading from Spaces
        **kwargs,
    ):
        self._id = Context.id
        Context.id += 1
        self.visible = visible
        self.elem_id = elem_id
        self.elem_classes = (
            [elem_classes] if isinstance(elem_classes, str) else elem_classes
        )
        self.root_url = root_url
        self.share_token = secrets.token_urlsafe(32)
        self._skip_init_processing = _skip_init_processing
        self.parent: BlockContext | None = None
        self.is_rendered: bool = False

        if render:
            self.render()
        check_deprecated_parameters(self.__class__.__name__, kwargs=kwargs)

    def render(self):
        """
        Adds self into appropriate BlockContext
        """
        if Context.root_block is not None and self._id in Context.root_block.blocks:
            raise DuplicateBlockError(
                f"A block with id: {self._id} has already been rendered in the current Blocks."
            )
        if Context.block is not None:
            Context.block.add(self)
        if Context.root_block is not None:
            Context.root_block.blocks[self._id] = self
            self.is_rendered = True
            if isinstance(self, components.IOComponent):
                Context.root_block.temp_file_sets.append(self.temp_files)
        return self

    def unrender(self):
        """
        Removes self from BlockContext if it has been rendered (otherwise does nothing).
        Removes self from the layout and collection of blocks, but does not delete any event triggers.
        """
        if Context.block is not None:
            try:
                Context.block.children.remove(self)
            except ValueError:
                pass
        if Context.root_block is not None:
            try:
                del Context.root_block.blocks[self._id]
                self.is_rendered = False
            except KeyError:
                pass
        return self

    def get_block_name(self) -> str:
        """
        Gets block's class name.

        If it is template component it gets the parent's class name.

        @return: class name
        """
        return (
            self.__class__.__base__.__name__.lower()
            if hasattr(self, "is_template")
            else self.__class__.__name__.lower()
        )

    def get_expected_parent(self) -> type[BlockContext] | None:
        return None

    def set_event_trigger(
        self,
        event_name: str,
        fn: Callable | None,
        inputs: Component | list[Component] | set[Component] | None,
        outputs: Component | list[Component] | None,
        preprocess: bool = True,
        postprocess: bool = True,
        scroll_to_output: bool = False,
        show_progress: str = "full",
        api_name: str | None | Literal[False] = None,
        js: str | None = None,
        no_target: bool = False,
        queue: bool | None = None,
        batch: bool = False,
        max_batch_size: int = 4,
        cancels: list[int] | None = None,
        every: float | None = None,
        collects_event_data: bool | None = None,
        trigger_after: int | None = None,
        trigger_only_on_success: bool = False,
    ) -> tuple[dict[str, Any], int]:
        """
        Adds an event to the component's dependencies.
        Parameters:
            event_name: event name
            fn: Callable function
            inputs: input list
            outputs: output list
            preprocess: whether to run the preprocess methods of components
            postprocess: whether to run the postprocess methods of components
            scroll_to_output: whether to scroll to output of dependency on trigger
            show_progress: whether to show progress animation while running.
            api_name: defines how the endpoint appears in the API docs. Can be a string, None, or False. If False, the endpoint will not be exposed in the api docs. If set to None, the endpoint will be exposed in the api docs as an unnamed endpoint, although this behavior will be changed in Gradio 4.0. If set to a string, the endpoint will be exposed in the api docs with the given name.
            js: Experimental parameter (API may change): Optional frontend js method to run before running 'fn'. Input arguments for js method are values of 'inputs' and 'outputs', return should be a list of values for output components
            no_target: if True, sets "targets" to [], used for Blocks "load" event
            queue: If True, will place the request on the queue, if the queue has been enabled. If False, will not put this event on the queue, even if the queue has been enabled. If None, will use the queue setting of the gradio app.
            batch: whether this function takes in a batch of inputs
            max_batch_size: the maximum batch size to send to the function
            cancels: a list of other events to cancel when this event is triggered. For example, setting cancels=[click_event] will cancel the click_event, where click_event is the return value of another components .click method.
            every: Run this event 'every' number of seconds while the client connection is open. Interpreted in seconds.
                Queue must be enabled.
            collects_event_data: whether to collect event data for this event
            trigger_after: if set, this event will be triggered after 'trigger_after' function index
            trigger_only_on_success: if True, this event will only be triggered if the previous event was successful (only applies if `trigger_after` is set)
        Returns: dependency information, dependency index
        """
        # Support for singular parameter
        if isinstance(inputs, set):
            inputs_as_dict = True
            inputs = sorted(inputs, key=lambda x: x._id)
        else:
            inputs_as_dict = False
            if inputs is None:
                inputs = []
            elif not isinstance(inputs, list):
                inputs = [inputs]

        if isinstance(outputs, set):
            outputs = sorted(outputs, key=lambda x: x._id)
        else:
            if outputs is None:
                outputs = []
            elif not isinstance(outputs, list):
                outputs = [outputs]

        if fn is not None and not cancels:
            check_function_inputs_match(fn, inputs, inputs_as_dict)

        if Context.root_block is None:
            raise AttributeError(
                f"{event_name}() and other events can only be called within a Blocks context."
            )
        if every is not None and every <= 0:
            raise ValueError("Parameter every must be positive or None")
        if every and batch:
            raise ValueError(
                f"Cannot run {event_name} event in a batch and every {every} seconds. "
                "Either batch is True or every is non-zero but not both."
            )

        if every and fn:
            fn = get_continuous_fn(fn, every)
        elif every:
            raise ValueError("Cannot set a value for `every` without a `fn`.")

        _, progress_index, event_data_index = (
            special_args(fn) if fn else (None, None, None)
        )
        Context.root_block.fns.append(
            BlockFunction(
                fn,
                inputs,
                outputs,
                preprocess,
                postprocess,
                inputs_as_dict,
                progress_index is not None,
            )
        )
        if api_name is not None and api_name is not False:
            api_name_ = utils.append_unique_suffix(
                api_name, [dep["api_name"] for dep in Context.root_block.dependencies]
            )
            if api_name != api_name_:
                warnings.warn(f"api_name {api_name} already exists, using {api_name_}")
                api_name = api_name_

        if collects_event_data is None:
            collects_event_data = event_data_index is not None

        dependency = {
            "targets": [self._id] if not no_target else [],
            "trigger": event_name,
            "inputs": [block._id for block in inputs],
            "outputs": [block._id for block in outputs],
            "backend_fn": fn is not None,
            "js": js,
            "queue": False if fn is None else queue,
            "api_name": api_name,
            "scroll_to_output": False if utils.get_space() else scroll_to_output,
            "show_progress": show_progress,
            "every": every,
            "batch": batch,
            "max_batch_size": max_batch_size,
            "cancels": cancels or [],
            "types": {
                "continuous": bool(every),
                "generator": inspect.isgeneratorfunction(fn) or bool(every),
            },
            "collects_event_data": collects_event_data,
            "trigger_after": trigger_after,
            "trigger_only_on_success": trigger_only_on_success,
        }
        Context.root_block.dependencies.append(dependency)
        return dependency, len(Context.root_block.dependencies) - 1

    def get_config(self):
        return {
            "visible": self.visible,
            "elem_id": self.elem_id,
            "elem_classes": self.elem_classes,
            "root_url": self.root_url,
        }

    @staticmethod
    @abstractmethod
    def update(**kwargs) -> dict:
        return {}

    @classmethod
    def get_specific_update(cls, generic_update: dict[str, Any]) -> dict:
        generic_update = generic_update.copy()
        del generic_update["__type__"]
        specific_update = cls.update(**generic_update)
        return specific_update


class BlockContext(Block):
    def __init__(
        self,
        visible: bool = True,
        render: bool = True,
        **kwargs,
    ):
        """
        Parameters:
            visible: If False, this will be hidden but included in the Blocks config file (its visibility can later be updated).
            render: If False, this will not be included in the Blocks config file at all.
        """
        self.children: list[Block] = []
        Block.__init__(self, visible=visible, render=render, **kwargs)

    def add_child(self, child: Block):
        self.children.append(child)

    def __enter__(self):
        self.parent = Context.block
        Context.block = self
        return self

    def add(self, child: Block):
        child.parent = self
        self.children.append(child)

    def fill_expected_parents(self):
        children = []
        pseudo_parent = None
        for child in self.children:
            expected_parent = child.get_expected_parent()
            if not expected_parent or isinstance(self, expected_parent):
                pseudo_parent = None
                children.append(child)
            else:
                if pseudo_parent is not None and isinstance(
                    pseudo_parent, expected_parent
                ):
                    pseudo_parent.add_child(child)
                else:
                    pseudo_parent = expected_parent(render=False)
                    pseudo_parent.parent = self
                    children.append(pseudo_parent)
                    pseudo_parent.add_child(child)
                    if Context.root_block:
                        Context.root_block.blocks[pseudo_parent._id] = pseudo_parent
                child.parent = pseudo_parent
        self.children = children

    def __exit__(self, *args):
        if getattr(self, "allow_expected_parents", True):
            self.fill_expected_parents()
        Context.block = self.parent

    def postprocess(self, y):
        """
        Any postprocessing needed to be performed on a block context.
        """
        return y


class BlockFunction:
    def __init__(
        self,
        fn: Callable | None,
        inputs: list[Component],
        outputs: list[Component],
        preprocess: bool,
        postprocess: bool,
        inputs_as_dict: bool,
        tracks_progress: bool = False,
    ):
        self.fn = fn
        self.inputs = inputs
        self.outputs = outputs
        self.preprocess = preprocess
        self.postprocess = postprocess
        self.tracks_progress = tracks_progress
        self.total_runtime = 0
        self.total_runs = 0
        self.inputs_as_dict = inputs_as_dict
        self.name = getattr(fn, "__name__", "fn") if fn is not None else None
        self.spaces_auto_wrap()

    def spaces_auto_wrap(self):
        if spaces is None:
            return
        if utils.get_space() is None:
            return
        self.fn = spaces.gradio_auto_wrap(self.fn)

    def __str__(self):
        return str(
            {
                "fn": self.name,
                "preprocess": self.preprocess,
                "postprocess": self.postprocess,
            }
        )

    def __repr__(self):
        return str(self)


class class_or_instancemethod(classmethod):  # noqa: N801
    def __get__(self, instance, type_):
        descr_get = super().__get__ if instance is None else self.__func__.__get__
        return descr_get(instance, type_)


def postprocess_update_dict(block: Block, update_dict: dict, postprocess: bool = True):
    """
    Converts a dictionary of updates into a format that can be sent to the frontend.
    E.g. {"__type__": "generic_update", "value": "2", "interactive": False}
    Into -> {"__type__": "update", "value": 2.0, "mode": "static"}

    Parameters:
        block: The Block that is being updated with this update dictionary.
        update_dict: The original update dictionary
        postprocess: Whether to postprocess the "value" key of the update dictionary.
    """
    if update_dict.get("__type__", "") == "generic_update":
        update_dict = block.get_specific_update(update_dict)
    if update_dict.get("value") is components._Keywords.NO_VALUE:
        update_dict.pop("value")
    interactive = update_dict.pop("interactive", None)
    if interactive is not None:
        update_dict["mode"] = "dynamic" if interactive else "static"
    prediction_value = delete_none(update_dict, skip_value=True)
    if "value" in prediction_value and postprocess:
        assert isinstance(
            block, components.IOComponent
        ), f"Component {block.__class__} does not support value"
        prediction_value["value"] = block.postprocess(prediction_value["value"])
    return prediction_value


def convert_component_dict_to_list(
    outputs_ids: list[int], predictions: dict
) -> list | dict:
    """
    Converts a dictionary of component updates into a list of updates in the order of
    the outputs_ids and including every output component. Leaves other types of dictionaries unchanged.
    E.g. {"textbox": "hello", "number": {"__type__": "generic_update", "value": "2"}}
    Into -> ["hello", {"__type__": "generic_update"}, {"__type__": "generic_update", "value": "2"}]
    """
    keys_are_blocks = [isinstance(key, Block) for key in predictions]
    if all(keys_are_blocks):
        reordered_predictions = [skip() for _ in outputs_ids]
        for component, value in predictions.items():
            if component._id not in outputs_ids:
                raise ValueError(
                    f"Returned component {component} not specified as output of function."
                )
            output_index = outputs_ids.index(component._id)
            reordered_predictions[output_index] = value
        predictions = utils.resolve_singleton(reordered_predictions)
    elif any(keys_are_blocks):
        raise ValueError(
            "Returned dictionary included some keys as Components. Either all keys must be Components to assign Component values, or return a List of values to assign output values in order."
        )
    return predictions


def get_api_info(config: dict, serialize: bool = True):
    """
    Gets the information needed to generate the API docs from a Blocks config.
    Parameters:
        config: a Blocks config dictionary
        serialize: If True, returns the serialized version of the typed information.
If False, returns the raw version.500"""501api_info = {"named_endpoints": {}, "unnamed_endpoints": {}}502mode = config.get("mode", None)503after_new_format = version.parse(config.get("version", "2.0")) > version.Version(504"3.28.3"505)506507for d, dependency in enumerate(config["dependencies"]):508dependency_info = {"parameters": [], "returns": []}509skip_endpoint = False510511inputs = dependency["inputs"]512for i in inputs:513for component in config["components"]:514if component["id"] == i:515break516else:517skip_endpoint = True # if component not found, skip endpoint518break519type = component["type"]520if type in client_utils.SKIP_COMPONENTS:521continue522if (523not component.get("serializer")524and type not in serializing.COMPONENT_MAPPING525):526skip_endpoint = True # if component not serializable, skip endpoint527break528if type in client_utils.SKIP_COMPONENTS:529continue530label = component["props"].get("label", f"parameter_{i}")531# The config has the most specific API info (taking into account the parameters532# of the component), so we use that if it exists. Otherwise, we fallback to the533# Serializer's API info.534serializer = serializing.COMPONENT_MAPPING[type]()535if component.get("api_info") and after_new_format:536info = component["api_info"]537example = component["example_inputs"]["serialized"]538else:539assert isinstance(serializer, serializing.Serializable)540info = serializer.api_info()541example = serializer.example_inputs()["raw"]542python_info = info["info"]543if serialize and info["serialized_info"]:544python_info = serializer.serialized_info()545if (546isinstance(serializer, serializing.FileSerializable)547and component["props"].get("file_count", "single") != "single"548):549python_info = serializer._multiple_file_serialized_info()550551python_type = client_utils.json_schema_to_python_type(python_info)552serializer_name = serializing.COMPONENT_MAPPING[type].__name__553dependency_info["parameters"].append(554{555"label": label,556"type": info["info"],557"python_type": {558"type": python_type,559"description": python_info.get("description", ""),560},561"component": type.capitalize(),562"example_input": example,563"serializer": serializer_name,564}565)566567outputs = dependency["outputs"]568for o in outputs:569for component in config["components"]:570if component["id"] == o:571break572else:573skip_endpoint = True # if component not found, skip endpoint574break575type = component["type"]576if type in client_utils.SKIP_COMPONENTS:577continue578if (579not component.get("serializer")580and type not in serializing.COMPONENT_MAPPING581):582skip_endpoint = True # if component not serializable, skip endpoint583break584label = component["props"].get("label", f"value_{o}")585serializer = serializing.COMPONENT_MAPPING[type]()586if component.get("api_info") and after_new_format:587info = component["api_info"]588example = component["example_inputs"]["serialized"]589else:590assert isinstance(serializer, serializing.Serializable)591info = serializer.api_info()592example = serializer.example_inputs()["raw"]593python_info = info["info"]594if serialize and info["serialized_info"]:595python_info = serializer.serialized_info()596if (597isinstance(serializer, serializing.FileSerializable)598and component["props"].get("file_count", "single") != "single"599):600python_info = serializer._multiple_file_serialized_info()601python_type = client_utils.json_schema_to_python_type(python_info)602serializer_name = 
serializing.COMPONENT_MAPPING[type].__name__603dependency_info["returns"].append(604{605"label": label,606"type": info["info"],607"python_type": {608"type": python_type,609"description": python_info.get("description", ""),610},611"component": type.capitalize(),612"serializer": serializer_name,613}614)615616if not dependency["backend_fn"]:617skip_endpoint = True618619if skip_endpoint:620continue621if dependency["api_name"] is not None and dependency["api_name"] is not False:622api_info["named_endpoints"][f"/{dependency['api_name']}"] = dependency_info623elif (624dependency["api_name"] is False625or mode == "interface"626or mode == "tabbed_interface"627):628pass # Skip unnamed endpoints in interface mode629else:630api_info["unnamed_endpoints"][str(d)] = dependency_info631632return api_info633634635@document("launch", "queue", "integrate", "load")636class Blocks(BlockContext):637"""638Blocks is Gradio's low-level API that allows you to create more custom web639applications and demos than Interfaces (yet still entirely in Python).640641642Compared to the Interface class, Blocks offers more flexibility and control over:643(1) the layout of components (2) the events that644trigger the execution of functions (3) data flows (e.g. inputs can trigger outputs,645which can trigger the next level of outputs). Blocks also offers ways to group646together related demos such as with tabs.647648649The basic usage of Blocks is as follows: create a Blocks object, then use it as a650context (with the "with" statement), and then define layouts, components, or events651within the Blocks context. Finally, call the launch() method to launch the demo.652653Example:654import gradio as gr655def update(name):656return f"Welcome to Gradio, {name}!"657658with gr.Blocks() as demo:659gr.Markdown("Start typing below and then click **Run** to see the output.")660with gr.Row():661inp = gr.Textbox(placeholder="What is your name?")662out = gr.Textbox()663btn = gr.Button("Run")664btn.click(fn=update, inputs=inp, outputs=out)665666demo.launch()667Demos: blocks_hello, blocks_flipper, blocks_speech_text_sentiment, generate_english_german, sound_alert668Guides: blocks-and-event-listeners, controlling-layout, state-in-blocks, custom-CSS-and-JS, custom-interpretations-with-blocks, using-blocks-like-functions669"""670671def __init__(672self,673theme: Theme | str | None = None,674analytics_enabled: bool | None = None,675mode: str = "blocks",676title: str = "Gradio",677css: str | None = None,678**kwargs,679):680"""681Parameters:682theme: a Theme object or a string representing a theme. If a string, will look for a built-in theme with that name (e.g. "soft" or "default"), or will attempt to load a theme from the HF Hub (e.g. "gradio/monochrome"). If None, will use the Default theme.683analytics_enabled: whether to allow basic telemetry. If None, will use GRADIO_ANALYTICS_ENABLED environment variable or default to True.684mode: a human-friendly name for the kind of Blocks or Interface being created.685title: The tab title to display when this is opened in a browser window.686css: custom css or path to custom css file to apply to entire Blocks687"""688self.limiter = None689if theme is None:690theme = DefaultTheme()691elif isinstance(theme, str):692if theme.lower() in BUILT_IN_THEMES:693theme = BUILT_IN_THEMES[theme.lower()]694else:695try:696theme = Theme.from_hub(theme)697except Exception as e:698warnings.warn(f"Cannot load {theme}. 
Caught Exception: {str(e)}")699theme = DefaultTheme()700if not isinstance(theme, Theme):701warnings.warn("Theme should be a class loaded from gradio.themes")702theme = DefaultTheme()703self.theme: Theme = theme704self.theme_css = theme._get_theme_css()705self.stylesheets = theme._stylesheets706self.encrypt = False707self.share = False708self.enable_queue = None709self.max_threads = 40710self.pending_streams = defaultdict(dict)711self.show_error = True712if css is not None and os.path.exists(css):713with open(css) as css_file:714self.css = css_file.read()715else:716self.css = css717718# For analytics_enabled and allow_flagging: (1) first check for719# parameter, (2) check for env variable, (3) default to True/"manual"720self.analytics_enabled = (721analytics_enabled722if analytics_enabled is not None723else analytics.analytics_enabled()724)725if self.analytics_enabled:726if not wasm_utils.IS_WASM:727t = threading.Thread(target=analytics.version_check)728t.start()729else:730os.environ["HF_HUB_DISABLE_TELEMETRY"] = "True"731super().__init__(render=False, **kwargs)732self.blocks: dict[int, Block] = {}733self.fns: list[BlockFunction] = []734self.dependencies = []735self.mode = mode736737self.is_running = False738self.local_url = None739self.share_url = None740self.width = None741self.height = None742self.api_open = True743744self.space_id = utils.get_space()745self.favicon_path = None746self.auth = None747self.dev_mode = True748self.app_id = random.getrandbits(64)749self.temp_file_sets = []750self.title = title751self.show_api = True752753# Only used when an Interface is loaded from a config754self.predict = None755self.input_components = None756self.output_components = None757self.__name__ = None758self.api_mode = None759self.progress_tracking = None760self.ssl_verify = True761762self.allowed_paths = []763self.blocked_paths = []764self.root_path = os.environ.get("GRADIO_ROOT_PATH", "")765self.root_urls = set()766767if self.analytics_enabled:768is_custom_theme = not any(769self.theme.to_dict() == built_in_theme.to_dict()770for built_in_theme in BUILT_IN_THEMES.values()771)772data = {773"mode": self.mode,774"custom_css": self.css is not None,775"theme": self.theme.name,776"is_custom_theme": is_custom_theme,777"version": GRADIO_VERSION,778}779analytics.initiated_analytics(data)780781@classmethod782def from_config(783cls,784config: dict,785fns: list[Callable],786root_url: str,787) -> Blocks:788"""789Factory method that creates a Blocks from a config and list of functions. Used790internally by the gradio.external.load() method.791792Parameters:793config: a dictionary containing the configuration of the Blocks.794fns: a list of functions that are used in the Blocks. 
Must be in the same order as the dependencies in the config.795root_url: an external url to use as a root URL when serving files for components in the Blocks.796"""797config = copy.deepcopy(config)798components_config = config["components"]799for component_config in components_config:800# for backwards compatibility, extract style into props801if "style" in component_config["props"]:802component_config["props"].update(component_config["props"]["style"])803del component_config["props"]["style"]804theme = config.get("theme", "default")805original_mapping: dict[int, Block] = {}806root_urls = {root_url}807808def get_block_instance(id: int) -> Block:809for block_config in components_config:810if block_config["id"] == id:811break812else:813raise ValueError(f"Cannot find block with id {id}")814cls = component_or_layout_class(block_config["type"])815block_config["props"].pop("type", None)816block_config["props"].pop("name", None)817# If a Gradio app B is loaded into a Gradio app A, and B itself loads a818# Gradio app C, then the root_urls of the components in A need to be the819# URL of C, not B. The else clause below handles this case.820if block_config["props"].get("root_url") is None:821block_config["props"]["root_url"] = f"{root_url}/"822else:823root_urls.add(block_config["props"]["root_url"])824# Any component has already processed its initial value, so we skip that step here825block = cls(**block_config["props"], _skip_init_processing=True)826return block827828def iterate_over_children(children_list):829for child_config in children_list:830id = child_config["id"]831block = get_block_instance(id)832833original_mapping[id] = block834835children = child_config.get("children")836if children is not None:837assert isinstance(838block, BlockContext839), f"Invalid config, Block with id {id} has children but is not a BlockContext."840with block:841iterate_over_children(children)842843derived_fields = ["types"]844845with Blocks(theme=theme) as blocks:846# ID 0 should be the root Blocks component847original_mapping[0] = Context.root_block or blocks848849iterate_over_children(config["layout"]["children"])850851first_dependency = None852853# add the event triggers854for dependency, fn in zip(config["dependencies"], fns):855# We used to add a "fake_event" to the config to cache examples856# without removing it. 
This was causing bugs in calling gr.load857# We fixed the issue by removing "fake_event" from the config in examples.py858# but we still need to skip these events when loading the config to support859# older demos860if dependency["trigger"] == "fake_event":861continue862for field in derived_fields:863dependency.pop(field, None)864targets = dependency.pop("targets")865trigger = dependency.pop("trigger")866dependency.pop("backend_fn")867dependency.pop("documentation", None)868dependency["inputs"] = [869original_mapping[i] for i in dependency["inputs"]870]871dependency["outputs"] = [872original_mapping[o] for o in dependency["outputs"]873]874dependency.pop("status_tracker", None)875dependency["preprocess"] = False876dependency["postprocess"] = False877878for target in targets:879dependency = original_mapping[target].set_event_trigger(880event_name=trigger, fn=fn, **dependency881)[0]882if first_dependency is None:883first_dependency = dependency884885# Allows some use of Interface-specific methods with loaded Spaces886if first_dependency and Context.root_block:887blocks.predict = [fns[0]]888blocks.input_components = [889Context.root_block.blocks[i] for i in first_dependency["inputs"]890]891blocks.output_components = [892Context.root_block.blocks[o] for o in first_dependency["outputs"]893]894blocks.__name__ = "Interface"895blocks.api_mode = True896897blocks.root_urls = root_urls898return blocks899900def __str__(self):901return self.__repr__()902903def __repr__(self):904num_backend_fns = len([d for d in self.dependencies if d["backend_fn"]])905repr = f"Gradio Blocks instance: {num_backend_fns} backend functions"906repr += f"\n{'-' * len(repr)}"907for d, dependency in enumerate(self.dependencies):908if dependency["backend_fn"]:909repr += f"\nfn_index={d}"910repr += "\n inputs:"911for input_id in dependency["inputs"]:912block = self.blocks[input_id]913repr += f"\n |-{block}"914repr += "\n outputs:"915for output_id in dependency["outputs"]:916block = self.blocks[output_id]917repr += f"\n |-{block}"918return repr919920@property921def expects_oauth(self):922"""Return whether the app expects user to authenticate via OAuth."""923return any(924isinstance(block, (components.LoginButton, components.LogoutButton))925for block in self.blocks.values()926)927928def render(self):929if Context.root_block is not None:930if self._id in Context.root_block.blocks:931raise DuplicateBlockError(932f"A block with id: {self._id} has already been rendered in the current Blocks."933)934overlapping_ids = set(Context.root_block.blocks).intersection(self.blocks)935for id in overlapping_ids:936# State components are allowed to be reused between Blocks937if not isinstance(self.blocks[id], components.State):938raise DuplicateBlockError(939"At least one block in this Blocks has already been rendered."940)941942Context.root_block.blocks.update(self.blocks)943Context.root_block.fns.extend(self.fns)944dependency_offset = len(Context.root_block.dependencies)945for i, dependency in enumerate(self.dependencies):946api_name = dependency["api_name"]947if api_name is not None and api_name is not False:948api_name_ = utils.append_unique_suffix(949api_name,950[dep["api_name"] for dep in Context.root_block.dependencies],951)952if api_name != api_name_:953warnings.warn(954f"api_name {api_name} already exists, using {api_name_}"955)956dependency["api_name"] = api_name_957dependency["cancels"] = [958c + dependency_offset for c in dependency["cancels"]959]960if dependency.get("trigger_after") is not None:961dependency["trigger_after"] += 
dependency_offset962# Recreate the cancel function so that it has the latest963# dependency fn indices. This is necessary to properly cancel964# events in the backend965if dependency["cancels"]:966updated_cancels = [967Context.root_block.dependencies[i]968for i in dependency["cancels"]969]970new_fn = BlockFunction(971get_cancel_function(updated_cancels)[0],972[],973[],974False,975True,976False,977)978Context.root_block.fns[dependency_offset + i] = new_fn979Context.root_block.dependencies.append(dependency)980Context.root_block.temp_file_sets.extend(self.temp_file_sets)981Context.root_block.root_urls.update(self.root_urls)982983if Context.block is not None:984Context.block.children.extend(self.children)985return self986987def is_callable(self, fn_index: int = 0) -> bool:988"""Checks if a particular Blocks function is callable (i.e. not stateful or a generator)."""989block_fn = self.fns[fn_index]990dependency = self.dependencies[fn_index]991992if inspect.isasyncgenfunction(block_fn.fn):993return False994if inspect.isgeneratorfunction(block_fn.fn):995return False996for input_id in dependency["inputs"]:997block = self.blocks[input_id]998if getattr(block, "stateful", False):999return False1000for output_id in dependency["outputs"]:1001block = self.blocks[output_id]1002if getattr(block, "stateful", False):1003return False10041005return True10061007def __call__(self, *inputs, fn_index: int = 0, api_name: str | None = None):1008"""1009Allows Blocks objects to be called as functions. Supply the parameters to the1010function as positional arguments. To choose which function to call, use the1011fn_index parameter, which must be a keyword argument.10121013Parameters:1014*inputs: the parameters to pass to the function1015fn_index: the index of the function to call (defaults to 0, which for Interfaces, is the default prediction function)1016api_name: The api_name of the dependency to call. Will take precedence over fn_index.1017"""1018if api_name is not None:1019inferred_fn_index = next(1020(1021i1022for i, d in enumerate(self.dependencies)1023if d.get("api_name") == api_name1024),1025None,1026)1027if inferred_fn_index is None:1028raise InvalidApiNameError(1029f"Cannot find a function with api_name {api_name}"1030)1031fn_index = inferred_fn_index1032if not (self.is_callable(fn_index)):1033raise ValueError(1034"This function is not callable because it is either stateful or is a generator. 
Please use the .launch() method instead to create an interactive user interface."1035)10361037inputs = list(inputs)1038processed_inputs = self.serialize_data(fn_index, inputs)1039batch = self.dependencies[fn_index]["batch"]1040if batch:1041processed_inputs = [[inp] for inp in processed_inputs]10421043outputs = client_utils.synchronize_async(1044self.process_api,1045fn_index=fn_index,1046inputs=processed_inputs,1047request=None,1048state={},1049)1050outputs = outputs["data"]10511052if batch:1053outputs = [out[0] for out in outputs]10541055processed_outputs = self.deserialize_data(fn_index, outputs)1056processed_outputs = utils.resolve_singleton(processed_outputs)10571058return processed_outputs10591060async def call_function(1061self,1062fn_index: int,1063processed_input: list[Any],1064iterator: AsyncIterator[Any] | None = None,1065requests: routes.Request | list[routes.Request] | None = None,1066event_id: str | None = None,1067event_data: EventData | None = None,1068):1069"""1070Calls function with given index and preprocessed input, and measures process time.1071Parameters:1072fn_index: index of function to call1073processed_input: preprocessed input to pass to function1074iterator: iterator to use if function is a generator1075requests: requests to pass to function1076event_id: id of event in queue1077event_data: data associated with event trigger1078"""1079block_fn = self.fns[fn_index]1080assert block_fn.fn, f"function with index {fn_index} not defined."1081is_generating = False1082request = requests[0] if isinstance(requests, list) else requests1083start = time.time()1084fn = utils.get_function_with_locals(block_fn.fn, self, event_id)10851086if iterator is None: # If not a generator function that has already run1087if block_fn.inputs_as_dict:1088processed_input = [dict(zip(block_fn.inputs, processed_input))]10891090processed_input, progress_index, _ = special_args(1091block_fn.fn, processed_input, request, event_data1092)1093progress_tracker = (1094processed_input[progress_index] if progress_index is not None else None1095)10961097if progress_tracker is not None and progress_index is not None:1098progress_tracker, fn = create_tracker(1099self, event_id, fn, progress_tracker.track_tqdm1100)1101processed_input[progress_index] = progress_tracker11021103if inspect.iscoroutinefunction(fn):1104prediction = await fn(*processed_input)1105else:1106prediction = await anyio.to_thread.run_sync(1107fn, *processed_input, limiter=self.limiter1108)1109else:1110prediction = None11111112if inspect.isgeneratorfunction(fn) or inspect.isasyncgenfunction(fn):1113if not self.enable_queue:1114raise ValueError("Need to enable queue to use generators.")1115try:1116if iterator is None:1117iterator = cast(AsyncIterator[Any], prediction)1118if inspect.isgenerator(iterator):1119iterator = utils.SyncToAsyncIterator(iterator, self.limiter)1120prediction = await utils.async_iteration(iterator)1121is_generating = True1122except StopAsyncIteration:1123n_outputs = len(self.dependencies[fn_index].get("outputs"))1124prediction = (1125components._Keywords.FINISHED_ITERATING1126if n_outputs == 11127else (components._Keywords.FINISHED_ITERATING,) * n_outputs1128)1129iterator = None11301131duration = time.time() - start11321133return {1134"prediction": prediction,1135"duration": duration,1136"is_generating": is_generating,1137"iterator": iterator,1138}11391140def serialize_data(self, fn_index: int, inputs: list[Any]) -> list[Any]:1141dependency = self.dependencies[fn_index]1142processed_input = []11431144for i, input_id in 
enumerate(dependency["inputs"]):1145try:1146block = self.blocks[input_id]1147except KeyError as e:1148raise InvalidBlockError(1149f"Input component with id {input_id} used in {dependency['trigger']}() event is not defined in this gr.Blocks context. You are allowed to nest gr.Blocks contexts, but there must be a gr.Blocks context that contains all components and events."1150) from e1151assert isinstance(1152block, components.IOComponent1153), f"{block.__class__} Component with id {input_id} not a valid input component."1154serialized_input = block.serialize(inputs[i])1155processed_input.append(serialized_input)11561157return processed_input11581159def deserialize_data(self, fn_index: int, outputs: list[Any]) -> list[Any]:1160dependency = self.dependencies[fn_index]1161predictions = []11621163for o, output_id in enumerate(dependency["outputs"]):1164try:1165block = self.blocks[output_id]1166except KeyError as e:1167raise InvalidBlockError(1168f"Output component with id {output_id} used in {dependency['trigger']}() event not found in this gr.Blocks context. You are allowed to nest gr.Blocks contexts, but there must be a gr.Blocks context that contains all components and events."1169) from e1170assert isinstance(1171block, components.IOComponent1172), f"{block.__class__} Component with id {output_id} not a valid output component."1173deserialized = block.deserialize(1174outputs[o],1175save_dir=block.DEFAULT_TEMP_DIR,1176root_url=block.root_url,1177hf_token=Context.hf_token,1178)1179predictions.append(deserialized)11801181return predictions11821183def validate_inputs(self, fn_index: int, inputs: list[Any]):1184block_fn = self.fns[fn_index]1185dependency = self.dependencies[fn_index]11861187dep_inputs = dependency["inputs"]11881189# This handles incorrect inputs when args are changed by a JS function1190# Only check not enough args case, ignore extra arguments (for now)1191# TODO: make this stricter?1192if len(inputs) < len(dep_inputs):1193name = (1194f" ({block_fn.name})"1195if block_fn.name and block_fn.name != "<lambda>"1196else ""1197)11981199wanted_args = []1200received_args = []1201for input_id in dep_inputs:1202block = self.blocks[input_id]1203wanted_args.append(str(block))1204for inp in inputs:1205v = f'"{inp}"' if isinstance(inp, str) else str(inp)1206received_args.append(v)12071208wanted = ", ".join(wanted_args)1209received = ", ".join(received_args)12101211# JS func didn't pass enough arguments1212raise ValueError(1213f"""An event handler{name} didn't receive enough input values (needed: {len(dep_inputs)}, got: {len(inputs)}).1214Check if the event handler calls a Javascript function, and make sure its return value is correct.1215Wanted inputs:1216[{wanted}]1217Received inputs:1218[{received}]"""1219)12201221def preprocess_data(self, fn_index: int, inputs: list[Any], state: dict[int, Any]):1222block_fn = self.fns[fn_index]1223dependency = self.dependencies[fn_index]12241225self.validate_inputs(fn_index, inputs)12261227if block_fn.preprocess:1228processed_input = []1229for i, input_id in enumerate(dependency["inputs"]):1230try:1231block = self.blocks[input_id]1232except KeyError as e:1233raise InvalidBlockError(1234f"Input component with id {input_id} used in {dependency['trigger']}() event not found in this gr.Blocks context. 
You are allowed to nest gr.Blocks contexts, but there must be a gr.Blocks context that contains all components and events."1235) from e1236assert isinstance(1237block, components.Component1238), f"{block.__class__} Component with id {input_id} not a valid input component."1239if getattr(block, "stateful", False):1240processed_input.append(state.get(input_id))1241else:1242processed_input.append(block.preprocess(inputs[i]))1243else:1244processed_input = inputs1245return processed_input12461247def validate_outputs(self, fn_index: int, predictions: Any | list[Any]):1248block_fn = self.fns[fn_index]1249dependency = self.dependencies[fn_index]12501251dep_outputs = dependency["outputs"]12521253if type(predictions) is not list and type(predictions) is not tuple:1254predictions = [predictions]12551256if len(predictions) < len(dep_outputs):1257name = (1258f" ({block_fn.name})"1259if block_fn.name and block_fn.name != "<lambda>"1260else ""1261)12621263wanted_args = []1264received_args = []1265for output_id in dep_outputs:1266block = self.blocks[output_id]1267wanted_args.append(str(block))1268for pred in predictions:1269v = f'"{pred}"' if isinstance(pred, str) else str(pred)1270received_args.append(v)12711272wanted = ", ".join(wanted_args)1273received = ", ".join(received_args)12741275raise ValueError(1276f"""An event handler{name} didn't receive enough output values (needed: {len(dep_outputs)}, received: {len(predictions)}).1277Wanted outputs:1278[{wanted}]1279Received outputs:1280[{received}]"""1281)12821283def postprocess_data(1284self, fn_index: int, predictions: list | dict, state: dict[int, Any]1285):1286block_fn = self.fns[fn_index]1287dependency = self.dependencies[fn_index]1288batch = dependency["batch"]12891290if type(predictions) is dict and len(predictions) > 0:1291predictions = convert_component_dict_to_list(1292dependency["outputs"], predictions1293)12941295if len(dependency["outputs"]) == 1 and not (batch):1296predictions = [1297predictions,1298]12991300self.validate_outputs(fn_index, predictions) # type: ignore13011302output = []1303for i, output_id in enumerate(dependency["outputs"]):1304try:1305if predictions[i] is components._Keywords.FINISHED_ITERATING:1306output.append(None)1307continue1308except (IndexError, KeyError) as err:1309raise ValueError(1310"Number of output components does not match number "1311f"of values returned from from function {block_fn.name}"1312) from err13131314try:1315block = self.blocks[output_id]1316except KeyError as e:1317raise InvalidBlockError(1318f"Output component with id {output_id} used in {dependency['trigger']}() event not found in this gr.Blocks context. 
You are allowed to nest gr.Blocks contexts, but there must be a gr.Blocks context that contains all components and events."1319) from e13201321if getattr(block, "stateful", False):1322if not utils.is_update(predictions[i]):1323state[output_id] = predictions[i]1324output.append(None)1325else:1326prediction_value = predictions[i]1327if utils.is_update(prediction_value):1328assert isinstance(prediction_value, dict)1329prediction_value = postprocess_update_dict(1330block=block,1331update_dict=prediction_value,1332postprocess=block_fn.postprocess,1333)1334elif block_fn.postprocess:1335assert isinstance(1336block, components.Component1337), f"{block.__class__} Component with id {output_id} not a valid output component."1338prediction_value = block.postprocess(prediction_value)1339output.append(prediction_value)13401341return output13421343def handle_streaming_outputs(1344self,1345fn_index: int,1346data: list,1347session_hash: str | None,1348run: int | None,1349) -> list:1350if session_hash is None or run is None:1351return data1352if run not in self.pending_streams[session_hash]:1353self.pending_streams[session_hash][run] = {}1354stream_run = self.pending_streams[session_hash][run]13551356from gradio.events import StreamableOutput13571358for i, output_id in enumerate(self.dependencies[fn_index]["outputs"]):1359block = self.blocks[output_id]1360if isinstance(block, StreamableOutput) and block.streaming:1361first_chunk = output_id not in stream_run1362binary_data, output_data = block.stream_output(1363data[i], f"{session_hash}/{run}/{output_id}", first_chunk1364)1365if first_chunk:1366stream_run[output_id] = []1367self.pending_streams[session_hash][run][output_id].append(binary_data)1368data[i] = output_data1369return data13701371async def process_api(1372self,1373fn_index: int,1374inputs: list[Any],1375state: dict[int, Any],1376request: routes.Request | list[routes.Request] | None = None,1377iterators: dict[int, Any] | None = None,1378session_hash: str | None = None,1379event_id: str | None = None,1380event_data: EventData | None = None,1381) -> dict[str, Any]:1382"""1383Processes API calls from the frontend. First preprocesses the data,1384then runs the relevant function, then postprocesses the output.1385Parameters:1386fn_index: Index of function to run.1387inputs: input data received from the frontend1388state: data stored from stateful components for session (key is input block id)1389request: the gr.Request object containing information about the network request (e.g. 
IP address, headers, query parameters, username)1390iterators: the in-progress iterators for each generator function (key is function index)1391event_id: id of event that triggered this API call1392event_data: data associated with the event trigger itself1393Returns: None1394"""1395block_fn = self.fns[fn_index]1396batch = self.dependencies[fn_index]["batch"]13971398if batch:1399max_batch_size = self.dependencies[fn_index]["max_batch_size"]1400batch_sizes = [len(inp) for inp in inputs]1401batch_size = batch_sizes[0]1402if inspect.isasyncgenfunction(block_fn.fn) or inspect.isgeneratorfunction(1403block_fn.fn1404):1405raise ValueError("Gradio does not support generators in batch mode.")1406if not all(x == batch_size for x in batch_sizes):1407raise ValueError(1408f"All inputs to a batch function must have the same length but instead have sizes: {batch_sizes}."1409)1410if batch_size > max_batch_size:1411raise ValueError(1412f"Batch size ({batch_size}) exceeds the max_batch_size for this function ({max_batch_size})"1413)14141415inputs = [1416self.preprocess_data(fn_index, list(i), state) for i in zip(*inputs)1417]1418result = await self.call_function(1419fn_index, list(zip(*inputs)), None, request, event_id, event_data1420)1421preds = result["prediction"]1422data = [1423self.postprocess_data(fn_index, list(o), state) for o in zip(*preds)1424]1425data = list(zip(*data))1426is_generating, iterator = None, None1427else:1428old_iterator = iterators.get(fn_index, None) if iterators else None1429if old_iterator:1430inputs = []1431else:1432inputs = self.preprocess_data(fn_index, inputs, state)1433was_generating = old_iterator is not None1434result = await self.call_function(1435fn_index, inputs, old_iterator, request, event_id, event_data1436)1437data = self.postprocess_data(fn_index, result["prediction"], state)1438is_generating, iterator = result["is_generating"], result["iterator"]1439if is_generating or was_generating:1440data = self.handle_streaming_outputs(1441fn_index,1442data,1443session_hash=session_hash,1444run=id(old_iterator) if was_generating else id(iterator),1445)14461447block_fn.total_runtime += result["duration"]1448block_fn.total_runs += 11449return {1450"data": data,1451"is_generating": is_generating,1452"iterator": iterator,1453"duration": result["duration"],1454"average_duration": block_fn.total_runtime / block_fn.total_runs,1455}14561457async def create_limiter(self):1458self.limiter = (1459None1460if self.max_threads == 401461else CapacityLimiter(total_tokens=self.max_threads)1462)14631464def get_config(self):1465return {"type": "column"}14661467def get_config_file(self):1468config = {1469"version": routes.VERSION,1470"mode": self.mode,1471"dev_mode": self.dev_mode,1472"analytics_enabled": self.analytics_enabled,1473"components": [],1474"css": self.css,1475"title": self.title or "Gradio",1476"space_id": self.space_id,1477"enable_queue": getattr(self, "enable_queue", False), # launch attributes1478"show_error": getattr(self, "show_error", False),1479"show_api": self.show_api,1480"is_colab": utils.colab_check(),1481"stylesheets": self.stylesheets,1482"theme": self.theme.name,1483}14841485def get_layout(block):1486if not isinstance(block, BlockContext):1487return {"id": block._id}1488children_layout = []1489for child in block.children:1490children_layout.append(get_layout(child))1491return {"id": block._id, "children": children_layout}14921493config["layout"] = get_layout(self)14941495for _id, block in self.blocks.items():1496props = block.get_config() if hasattr(block, 
"get_config") else {}1497block_config = {1498"id": _id,1499"type": block.get_block_name(),1500"props": utils.delete_none(props),1501}1502serializer = utils.get_serializer_name(block)1503if serializer:1504assert isinstance(block, serializing.Serializable)1505block_config["serializer"] = serializer1506block_config["api_info"] = block.api_info() # type: ignore1507block_config["example_inputs"] = block.example_inputs() # type: ignore1508config["components"].append(block_config)1509config["dependencies"] = self.dependencies1510return config15111512def __enter__(self):1513if Context.block is None:1514Context.root_block = self1515self.parent = Context.block1516Context.block = self1517self.exited = False1518return self15191520def __exit__(self, *args):1521super().fill_expected_parents()1522Context.block = self.parent1523# Configure the load events before root_block is reset1524self.attach_load_events()1525if self.parent is None:1526Context.root_block = None1527else:1528self.parent.children.extend(self.children)1529self.config = self.get_config_file()1530self.app = routes.App.create_app(self)1531self.progress_tracking = any(block_fn.tracks_progress for block_fn in self.fns)1532self.exited = True15331534@class_or_instancemethod1535def load(1536self_or_cls, # noqa: N8051537fn: Callable | None = None,1538inputs: list[Component] | None = None,1539outputs: list[Component] | None = None,1540api_name: str | None | Literal[False] = None,1541scroll_to_output: bool = False,1542show_progress: str = "full",1543queue=None,1544batch: bool = False,1545max_batch_size: int = 4,1546preprocess: bool = True,1547postprocess: bool = True,1548every: float | None = None,1549_js: str | None = None,1550*,1551name: str | None = None,1552src: str | None = None,1553api_key: str | None = None,1554alias: str | None = None,1555**kwargs,1556) -> Blocks | dict[str, Any] | None:1557"""1558For reverse compatibility reasons, this is both a class method and an instance1559method, the two of which, confusingly, do two completely different things.156015611562Class method: loads a demo from a Hugging Face Spaces repo and creates it locally and returns a block instance. Warning: this method will be deprecated. Use the equivalent `gradio.load()` instead.156315641565Instance method: adds event that runs as soon as the demo loads in the browser. Example usage below.1566Parameters:1567name: Class Method - the name of the model (e.g. "gpt2" or "facebook/bart-base") or space (e.g. "flax-community/spanish-gpt2"), can include the `src` as prefix (e.g. "models/facebook/bart-base")1568src: Class Method - the source of the model: `models` or `spaces` (or leave empty if source is provided as a prefix in `name`)1569api_key: Class Method - optional access token for loading private Hugging Face Hub models or spaces. Find your token here: https://huggingface.co/settings/tokens. Warning: only provide this if you are loading a trusted private Space as it can be read by the Space you are loading.1570alias: Class Method - optional string used as the name of the loaded model instead of the default name (only applies if loading a Space running Gradio 2.x)1571fn: Instance Method - the function to wrap an interface around. Often a machine learning model's prediction function. Each parameter of the function corresponds to one input component, and the function should return a single value or a tuple of values, with each element in the tuple corresponding to one output component.1572inputs: Instance Method - List of gradio.components to use as inputs. 
If the function takes no inputs, this should be an empty list.1573outputs: Instance Method - List of gradio.components to use as inputs. If the function returns no outputs, this should be an empty list.1574api_name: Instance Method - Defines how the endpoint appears in the API docs. Can be a string, None, or False. If False, the endpoint will not be exposed in the api docs. If set to None, the endpoint will be exposed in the api docs as an unnamed endpoint, although this behavior will be changed in Gradio 4.0. If set to a string, the endpoint will be exposed in the api docs with the given name.1575scroll_to_output: Instance Method - If True, will scroll to output component on completion1576show_progress: Instance Method - If True, will show progress animation while pending1577queue: Instance Method - If True, will place the request on the queue, if the queue exists1578batch: Instance Method - If True, then the function should process a batch of inputs, meaning that it should accept a list of input values for each parameter. The lists should be of equal length (and be up to length `max_batch_size`). The function is then *required* to return a tuple of lists (even if there is only 1 output component), with each list in the tuple corresponding to one output component.1579max_batch_size: Instance Method - Maximum number of inputs to batch together if this is called from the queue (only relevant if batch=True)1580preprocess: Instance Method - If False, will not run preprocessing of component data before running 'fn' (e.g. leaving it as a base64 string if this method is called with the `Image` component).1581postprocess: Instance Method - If False, will not run postprocessing of component data before returning 'fn' output to the browser.1582every: Instance Method - Run this event 'every' number of seconds. Interpreted in seconds. Queue must be enabled.1583Example:1584import gradio as gr1585import datetime1586with gr.Blocks() as demo:1587def get_time():1588return datetime.datetime.now().time()1589dt = gr.Textbox(label="Current time")1590demo.load(get_time, inputs=None, outputs=dt)1591demo.launch()1592"""1593if isinstance(self_or_cls, type):1594warn_deprecation(1595"gr.Blocks.load() will be deprecated. 
Use gr.load() instead."1596)1597if name is None:1598raise ValueError(1599"Blocks.load() requires passing parameters as keyword arguments"1600)1601return external.load(1602name=name, src=src, hf_token=api_key, alias=alias, **kwargs1603)1604else:1605from gradio.events import Dependency16061607dep, dep_index = self_or_cls.set_event_trigger(1608event_name="load",1609fn=fn,1610inputs=inputs,1611outputs=outputs,1612api_name=api_name,1613preprocess=preprocess,1614postprocess=postprocess,1615scroll_to_output=scroll_to_output,1616show_progress=show_progress,1617js=_js,1618queue=queue,1619batch=batch,1620max_batch_size=max_batch_size,1621every=every,1622no_target=True,1623)1624return Dependency(self_or_cls, dep, dep_index)16251626def clear(self):1627"""Resets the layout of the Blocks object."""1628self.blocks = {}1629self.fns = []1630self.dependencies = []1631self.children = []1632return self16331634@concurrency_count_warning1635@document()1636def queue(1637self,1638concurrency_count: int = 1,1639status_update_rate: float | Literal["auto"] = "auto",1640client_position_to_load_data: int | None = None,1641default_enabled: bool | None = None,1642api_open: bool = True,1643max_size: int | None = None,1644):1645"""1646By enabling the queue you can control the rate of processed requests, let users know their position in the queue, and set a limit on maximum number of events allowed.1647Parameters:1648concurrency_count: Number of worker threads that will be processing requests from the queue concurrently. Increasing this number will increase the rate at which requests are processed, but will also increase the memory usage of the queue.1649status_update_rate: If "auto", Queue will send status estimations to all clients whenever a job is finished. Otherwise Queue will send status at regular intervals set by this parameter as the number of seconds.1650client_position_to_load_data: DEPRECATED. This parameter is deprecated and has no effect.1651default_enabled: Deprecated and has no effect.1652api_open: If True, the REST routes of the backend will be open, allowing requests made directly to those endpoints to skip the queue.1653max_size: The maximum number of events the queue will store at any given moment. If the queue is full, new events will not be added and a user will receive a message saying that the queue is full. 
        Example: (Blocks)
            with gr.Blocks() as demo:
                button = gr.Button(label="Generate Image")
                button.click(fn=image_generator, inputs=gr.Textbox(), outputs=gr.Image())
            demo.queue(max_size=10)
            demo.launch()
        Example: (Interface)
            demo = gr.Interface(image_generator, gr.Textbox(), gr.Image())
            demo.queue(max_size=20)
            demo.launch()
        """
        if default_enabled is not None:
            warn_deprecation(
                "The default_enabled parameter of queue has no effect and will be removed "
                "in a future version of gradio."
            )
        self.enable_queue = True
        self.api_open = api_open
        if client_position_to_load_data is not None:
            warn_deprecation(
                "The client_position_to_load_data parameter is deprecated."
            )
        if utils.is_zero_gpu_space():
            concurrency_count = self.max_threads
            max_size = 1 if max_size is None else max_size
        self._queue = queueing.Queue(
            live_updates=status_update_rate == "auto",
            concurrency_count=concurrency_count,
            update_intervals=status_update_rate if status_update_rate != "auto" else 1,
            max_size=max_size,
            blocks_dependencies=self.dependencies,
        )
        self.config = self.get_config_file()
        self.app = routes.App.create_app(self)
        return self
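
    # Illustrative usage sketch for queue() above (not part of the library source).
    # `image_generator` is a placeholder function, as in the docstring examples.
    #
    #     import gradio as gr
    #     demo = gr.Interface(image_generator, gr.Textbox(), gr.Image())
    #     demo.queue(concurrency_count=2, api_open=False, max_size=20)
    #     demo.launch()
    #
    # With api_open=False the direct REST prediction routes are closed, so every
    # request is handled through the queue and bounded by max_size, while
    # concurrency_count controls how many queued jobs run at once.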

    def validate_queue_settings(self):
        if not self.enable_queue and self.progress_tracking:
            raise ValueError("Progress tracking requires queuing to be enabled.")

        for fn_index, dep in enumerate(self.dependencies):
            if not self.enable_queue and self.queue_enabled_for_fn(fn_index):
                raise ValueError(
                    f"The queue is enabled for event {dep['api_name'] if dep['api_name'] else fn_index} "
                    "but the queue has not been enabled for the app. Please call .queue() "
                    "on your app. Consult https://gradio.app/docs/#blocks-queue for information on how "
                    "to configure the queue."
                )
            for i in dep["cancels"]:
                if not self.queue_enabled_for_fn(i):
                    raise ValueError(
                        "Queue needs to be enabled! "
                        "You may get this error by either 1) passing a function that uses the yield keyword "
                        "into an interface without enabling the queue or 2) defining an event that cancels "
                        "another event without enabling the queue. Both can be solved by calling .queue() "
                        "before .launch()"
                    )
            if dep["batch"] and (
                dep["queue"] is False
                or (dep["queue"] is None and not self.enable_queue)
            ):
                raise ValueError("In order to use batching, the queue must be enabled.")

    def launch(
        self,
        inline: bool | None = None,
        inbrowser: bool = False,
        share: bool | None = None,
        debug: bool = False,
        enable_queue: bool | None = None,
        max_threads: int = 40,
        auth: Callable | tuple[str, str] | list[tuple[str, str]] | None = None,
        auth_message: str | None = None,
        prevent_thread_lock: bool = False,
        show_error: bool = False,
        server_name: str | None = None,
        server_port: int | None = None,
        show_tips: bool = False,
        height: int = 500,
        width: int | str = "100%",
        encrypt: bool | None = None,
        favicon_path: str | None = None,
        ssl_keyfile: str | None = None,
        ssl_certfile: str | None = None,
        ssl_keyfile_password: str | None = None,
        ssl_verify: bool = True,
        quiet: bool = False,
        show_api: bool = True,
        file_directories: list[str] | None = None,
        allowed_paths: list[str] | None = None,
        blocked_paths: list[str] | None = None,
        root_path: str | None = None,
        _frontend: bool = True,
        app_kwargs: dict[str, Any] | None = None,
    ) -> tuple[FastAPI, str, str]:
        """
        Launches a simple web server that serves the demo. Can also be used to create a
        public link used by anyone to access the demo from their browser by setting share=True.

        Parameters:
            inline: whether to display the interface inline in an iframe. Defaults to True in python notebooks; False otherwise.
            inbrowser: whether to automatically launch the interface in a new tab on the default browser.
            share: whether to create a publicly shareable link for the interface. Creates an SSH tunnel to make your UI accessible from anywhere. If not provided, it is set to False by default every time, except when running in Google Colab. When localhost is not accessible (e.g. Google Colab), setting share=False is not supported.
            debug: if True, blocks the main thread from running. If running in Google Colab, this is needed to print the errors in the cell output.
            auth: If provided, username and password (or list of username-password tuples) required to access interface. Can also provide function that takes username and password and returns True if valid login.
            auth_message: If provided, HTML message provided on login page.
            prevent_thread_lock: If True, the interface will not block the main thread while the server is running.
            show_error: If True, any errors in the interface will be displayed in an alert modal and printed in the browser console log
            server_port: will start gradio app on this port (if available). Can be set by environment variable GRADIO_SERVER_PORT. If None, will search for an available port starting at 7860.
            server_name: to make app accessible on local network, set this to "0.0.0.0". Can be set by environment variable GRADIO_SERVER_NAME. If None, will use "127.0.0.1".
            show_tips: if True, will occasionally show tips about new Gradio features
            enable_queue: DEPRECATED (use .queue() method instead.) if True, inference requests will be served through a queue instead of with parallel threads. Required for longer inference times (> 1min) to prevent timeout. The default option in HuggingFace Spaces is True. The default option elsewhere is False.
            max_threads: the maximum number of total threads that the Gradio app can generate in parallel. The default is inherited from the starlette library (currently 40). Applies whether the queue is enabled or not. But if queuing is enabled, this parameter is increased to be at least the concurrency_count of the queue.
            width: The width in pixels of the iframe element containing the interface (used if inline=True)
            height: The height in pixels of the iframe element containing the interface (used if inline=True)
            encrypt: DEPRECATED. Has no effect.
            favicon_path: If a path to a file (.png, .gif, or .ico) is provided, it will be used as the favicon for the web page.
            ssl_keyfile: If a path to a file is provided, will use this as the private key file to create a local server running on https.
            ssl_certfile: If a path to a file is provided, will use this as the signed certificate for https. Needs to be provided if ssl_keyfile is provided.
            ssl_keyfile_password: If a password is provided, will use this with the ssl certificate for https.
            ssl_verify: If False, skips certificate validation which allows self-signed certificates to be used.
            quiet: If True, suppresses most print statements.
            show_api: If True, shows the api docs in the footer of the app. Default True. If the queue is enabled, then api_open parameter of .queue() will determine if the api docs are shown, independent of the value of show_api.
            file_directories: This parameter has been renamed to `allowed_paths`. It will be removed in a future version.
            allowed_paths: List of complete filepaths or parent directories that gradio is allowed to serve (in addition to the directory containing the gradio python file). Must be absolute paths. Warning: if you provide directories, any files in these directories or their subdirectories are accessible to all users of your app.
            blocked_paths: List of complete filepaths or parent directories that gradio is not allowed to serve (i.e. users of your app are not allowed to access). Must be absolute paths. Warning: takes precedence over `allowed_paths` and all other directories exposed by Gradio by default.
            root_path: The root path (or "mount point") of the application, if it's not served from the root ("/") of the domain. Often used when the application is behind a reverse proxy that forwards requests to the application. For example, if the application is served at "https://example.com/myapp", the `root_path` should be set to "/myapp". Can be set by environment variable GRADIO_ROOT_PATH. Defaults to "".
            app_kwargs: Additional keyword arguments to pass to the underlying FastAPI app as a dictionary of parameter keys and argument values. For example, `{"docs_url": "/docs"}`
        Returns:
            app: FastAPI app object that is running the demo
            local_url: Locally accessible link to the demo
            share_url: Publicly accessible link to the demo (if share=True, otherwise None)
        Example: (Blocks)
            import gradio as gr
            def reverse(text):
                return text[::-1]
            with gr.Blocks() as demo:
                button = gr.Button(value="Reverse")
                button.click(reverse, gr.Textbox(), gr.Textbox())
            demo.launch(share=True, auth=("username", "password"))
        Example: (Interface)
            import gradio as gr
            def reverse(text):
                return text[::-1]
            demo = gr.Interface(reverse, "text", "text")
            demo.launch(share=True, auth=("username", "password"))
        """
        if not self.exited:
            self.__exit__()

        self.dev_mode = False
        if (
            auth
            and not callable(auth)
            and not isinstance(auth[0], tuple)
            and not isinstance(auth[0], list)
        ):
            self.auth = [auth]
        else:
            self.auth = auth
        self.auth_message = auth_message
        self.show_tips = show_tips
        self.show_error = show_error
        self.height = height
        self.width = width
        self.favicon_path = favicon_path
        self.ssl_verify = ssl_verify
        if root_path is None:
            self.root_path = os.environ.get("GRADIO_ROOT_PATH", "")
        else:
            self.root_path = root_path

        if enable_queue is not None:
            self.enable_queue = enable_queue
            warn_deprecation(
                "The `enable_queue` parameter has been deprecated. "
                "Please use the `.queue()` method instead.",
            )
        if encrypt is not None:
            warn_deprecation(
                "The `encrypt` parameter has been deprecated and has no effect.",
            )

        if self.space_id:
            self.enable_queue = self.enable_queue is not False
        else:
            self.enable_queue = self.enable_queue is True
        if self.enable_queue and not hasattr(self, "_queue"):
            self.queue()
        self.show_api = self.api_open if self.enable_queue else show_api

        if file_directories is not None:
            warn_deprecation(
                "The `file_directories` parameter has been renamed to `allowed_paths`. "
                "Please use that instead.",
            )
            if allowed_paths is None:
                allowed_paths = file_directories
        self.allowed_paths = allowed_paths or []
        self.blocked_paths = blocked_paths or []

        if not isinstance(self.allowed_paths, list):
            raise ValueError("`allowed_paths` must be a list of directories.")
        if not isinstance(self.blocked_paths, list):
            raise ValueError("`blocked_paths` must be a list of directories.")

        self.validate_queue_settings()

        self.config = self.get_config_file()
        self.max_threads = max(
            self._queue.max_thread_count if self.enable_queue else 0, max_threads
        )

        if self.is_running:
            assert isinstance(
                self.local_url, str
            ), f"Invalid local_url: {self.local_url}"
            if not (quiet):
                print(
                    "Rerunning server... use `close()` to stop if you need to change `launch()` parameters.\n----"
                )
        else:
            if wasm_utils.IS_WASM:
                server_name = "xxx"
                server_port = 99999
                local_url = ""
                server = None

                # In the Wasm environment, we only need the app object,
                # which the frontend app will directly communicate with through the Worker API,
                # and we don't need to start a server.
                # So we just create the app object and register it here,
                # and avoid using `networking.start_server`, which would start a server that doesn't work in the Wasm env.
                from gradio.routes import App

                app = App.create_app(self, app_kwargs=app_kwargs)
                wasm_utils.register_app(app)
            else:
                (
                    server_name,
                    server_port,
                    local_url,
                    app,
                    server,
                ) = networking.start_server(
                    self,
                    server_name,
                    server_port,
                    ssl_keyfile,
                    ssl_certfile,
                    ssl_keyfile_password,
                    app_kwargs=app_kwargs,
                )
            self.server_name = server_name
            self.local_url = local_url
            self.server_port = server_port
            self.server_app = app
            self.server = server
            self.is_running = True
            self.is_colab = utils.colab_check()
            self.is_kaggle = utils.kaggle_check()

            self.protocol = "https"

            if self.enable_queue:
                self._queue.set_url(self.local_url)

            if not wasm_utils.IS_WASM:
                # Cannot run async functions in background other than app's scope.
                # Workaround by triggering the app endpoint
                requests.get(f"{self.local_url}startup-events", verify=ssl_verify)
            else:
                pass
                # TODO: Call the startup endpoint in the Wasm env too.

        utils.launch_counter()
        self.is_sagemaker = utils.sagemaker_check()
        if share is None:
            if self.is_colab and self.enable_queue:
                if not quiet:
                    print(
                        "Setting queue=True in a Colab notebook requires sharing enabled. Setting `share=True` (you can turn this off by setting `share=False` in `launch()` explicitly).\n"
                    )
                self.share = True
            elif self.is_kaggle:
                if not quiet:
                    print(
                        "Kaggle notebooks require sharing enabled. Setting `share=True` (you can turn this off by setting `share=False` in `launch()` explicitly).\n"
                    )
                self.share = True
            elif self.is_sagemaker:
                if not quiet:
                    print(
                        "Sagemaker notebooks may require sharing enabled. Setting `share=True` (you can turn this off by setting `share=False` in `launch()` explicitly).\n"
                    )
                self.share = True
            else:
                self.share = False
        else:
            self.share = share

        # If running in a colab or not able to access localhost,
        # a shareable link must be created.
        if (
            _frontend
            and not wasm_utils.IS_WASM
            and not networking.url_ok(self.local_url)
            and not self.share
        ):
            raise ValueError(
                "When localhost is not accessible, a shareable link must be created. Please set share=True or check your proxy settings to allow access to localhost."
            )

        if self.is_colab:
            if not quiet:
                if debug:
                    print(strings.en["COLAB_DEBUG_TRUE"])
                else:
                    print(strings.en["COLAB_DEBUG_FALSE"])
                if not self.share:
                    print(strings.en["COLAB_WARNING"].format(self.server_port))
            if self.enable_queue and not self.share:
                raise ValueError(
                    "When using queueing in Colab, a shareable link must be created. Please set share=True."
                )
        else:
            if not self.share:
                print(f'Running on local URL: https://{self.server_name}')

        if self.share:
            if self.space_id:
                raise RuntimeError("Share is not supported when you are in Spaces")
            if wasm_utils.IS_WASM:
                raise RuntimeError("Share is not supported in the Wasm environment")
            try:
                if self.share_url is None:
                    self.share_url = networking.setup_tunnel(
                        self.server_name, self.server_port, self.share_token
                    )
                print(strings.en["SHARE_LINK_DISPLAY"].format(self.share_url))
                if not (quiet):
                    print('\033[32m\u2714 Connected')
            except (RuntimeError, requests.exceptions.ConnectionError):
                if self.analytics_enabled:
                    analytics.error_analytics("Not able to set up tunnel")
                self.share_url = None
                self.share = False
                if Path(BINARY_PATH).exists():
                    print(strings.en["COULD_NOT_GET_SHARE_LINK"])
                else:
                    print(
                        strings.en["COULD_NOT_GET_SHARE_LINK_MISSING_FILE"].format(
                            BINARY_PATH,
                            BINARY_URL,
                            BINARY_FILENAME,
                            BINARY_FOLDER,
                        )
                    )
        else:
            if not quiet and not wasm_utils.IS_WASM:
                print('\033[32m\u2714 Connected')
            self.share_url = None

        if inbrowser and not wasm_utils.IS_WASM:
            link = self.share_url if self.share and self.share_url else self.local_url
            webbrowser.open(link)

        # Check if running in a Python notebook in which case, display inline
        if inline is None:
            inline = utils.ipython_check()
        if inline:
            try:
                from IPython.display import HTML, Javascript, display  # type: ignore

                if self.share and self.share_url:
                    while not networking.url_ok(self.share_url):
                        time.sleep(0.25)
                    display(
                        HTML(
                            f'<div><iframe src="{self.share_url}" width="{self.width}" height="{self.height}" allow="autoplay; camera; microphone; clipboard-read; clipboard-write;" frameborder="0" allowfullscreen></iframe></div>'
                        )
                    )
                elif self.is_colab:
                    # modified from /usr/local/lib/python3.7/dist-packages/google/colab/output/_util.py within Colab environment
                    code = """(async (port, path, width, height, cache, element) => {
                        if (!google.colab.kernel.accessAllowed && !cache) {
                            return;
                        }
                        element.appendChild(document.createTextNode(''));
                        const url = await google.colab.kernel.proxyPort(port, {cache});

                        const external_link = document.createElement('div');
                        external_link.innerHTML = `
                            <div style="font-family: monospace; margin-bottom: 0.5rem">
                                Running on <a href=${new URL(path, url).toString()} target="_blank">
                                    https://localhost:${port}${path}
                                </a>
                            </div>
                        `;
                        element.appendChild(external_link);

                        const iframe = document.createElement('iframe');
                        iframe.src = new URL(path, url).toString();
                        iframe.height = height;
                        iframe.allow = "autoplay; camera; microphone; clipboard-read; clipboard-write;"
                        iframe.width = width;
                        iframe.style.border = 0;
                        element.appendChild(iframe);
                    })""" + "({port}, {path}, {width}, {height}, {cache}, window.element)".format(
                        port=json.dumps(self.server_port),
                        path=json.dumps("/"),
                        width=json.dumps(self.width),
                        height=json.dumps(self.height),
                        cache=json.dumps(False),
                    )

                    display(Javascript(code))
                else:
                    display(
                        HTML(
                            f'<div><iframe src="{self.local_url}" width="{self.width}" height="{self.height}" allow="autoplay; camera; microphone; clipboard-read; clipboard-write;" frameborder="0" allowfullscreen></iframe></div>'
                        )
                    )
            except ImportError:
                pass

        if getattr(self, "analytics_enabled", False):
            data = {
                "launch_method": "browser" if inbrowser else "inline",
                "is_google_colab": self.is_colab,
                "is_sharing_on": self.share,
                "share_url": self.share_url,
                "enable_queue": self.enable_queue,
                "show_tips": self.show_tips,
                "server_name": server_name,
                "server_port": server_port,
                "is_space": self.space_id is not None,
                "mode": self.mode,
            }
            analytics.launched_analytics(self, data)

        utils.show_tip(self)

        # Block main thread if debug==True
        if (debug or int(os.getenv("GRADIO_DEBUG", 0)) == 1) and not wasm_utils.IS_WASM:
            self.block_thread()
        # Block main thread if running in a script to stop script from exiting
        is_in_interactive_mode = bool(getattr(sys, "ps1", sys.flags.interactive))

        if (
            not prevent_thread_lock
            and not is_in_interactive_mode
            # In the Wasm env, we don't have to block the main thread because the server won't be shut down after the execution finishes.
            # Moreover, we MUST NOT do it because there is only one thread in the Wasm env and blocking it will stop the subsequent code from running.
            and not wasm_utils.IS_WASM
        ):
            self.block_thread()

        return TupleNoPrint((self.server_app, self.local_url, self.share_url))
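
    # Illustrative usage sketch for launch() above (not part of the library source):
    # it returns the FastAPI app plus the local and share URLs, and
    # prevent_thread_lock=True returns control to the calling script instead of
    # blocking in block_thread().
    #
    #     import gradio as gr
    #     demo = gr.Interface(lambda x: x, "text", "text")
    #     app, local_url, share_url = demo.launch(prevent_thread_lock=True)
    #     print(local_url)  # e.g. http://127.0.0.1:7860/
    #     demo.close()      # free the port when finished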

    def integrate(
        self,
        comet_ml=None,
        wandb: ModuleType | None = None,
        mlflow: ModuleType | None = None,
    ) -> None:
        """
        A catch-all method for integrating with other libraries. This method should be run after launch().
        Parameters:
            comet_ml: If a comet_ml Experiment object is provided, will integrate with the experiment and appear on Comet dashboard
            wandb: If the wandb module is provided, will integrate with it and appear on WandB dashboard
            mlflow: If the mlflow module is provided, will integrate with the experiment and appear on MLflow dashboard
        """
        analytics_integration = ""
        if comet_ml is not None:
            analytics_integration = "CometML"
            comet_ml.log_other("Created from", "Gradio")
            if self.share_url is not None:
                comet_ml.log_text(f"gradio: {self.share_url}")
                comet_ml.end()
            elif self.local_url:
                comet_ml.log_text(f"gradio: {self.local_url}")
                comet_ml.end()
            else:
                raise ValueError("Please run `launch()` first.")
        if wandb is not None:
            analytics_integration = "WandB"
            if self.share_url is not None:
                wandb.log(
                    {
                        "Gradio panel": wandb.Html(
                            '<iframe src="'
                            + self.share_url
                            + '" width="'
                            + str(self.width)
                            + '" height="'
                            + str(self.height)
                            + '" frameBorder="0"></iframe>'
                        )
                    }
                )
            else:
                print(
                    "The WandB integration requires you to "
                    "`launch(share=True)` first."
                )
        if mlflow is not None:
            analytics_integration = "MLFlow"
            if self.share_url is not None:
                mlflow.log_param("Gradio Interface Share Link", self.share_url)
            else:
                mlflow.log_param("Gradio Interface Local Link", self.local_url)
        if self.analytics_enabled and analytics_integration:
            data = {"integration": analytics_integration}
            analytics.integration_analytics(data)

    def close(self, verbose: bool = True) -> None:
        """
        Closes the Interface that was launched and frees the port.
        """
        try:
            if self.enable_queue:
                self._queue.close()
            if self.server:
                self.server.close()
            self.is_running = False
            # So that the startup events (starting the queue)
            # happen the next time the app is launched
            self.app.startup_events_triggered = False
            if verbose:
                print(f"Closing server running on port: {self.server_port}")
        except (AttributeError, OSError):  # can't close if not running
            pass
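
    # Illustrative usage sketch for close() above (not part of the library source):
    # pairing it with a non-blocking launch() lets a script restart a demo without
    # leaking the previous server or port.
    #
    #     import gradio as gr
    #     demo = gr.Interface(lambda x: x, "text", "text")
    #     demo.launch(prevent_thread_lock=True)
    #     demo.close()   # stops the queue (if enabled) and frees the port
    #     demo.launch(prevent_thread_lock=True)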

    def block_thread(
        self,
    ) -> None:
        """Block main thread until interrupted by user."""
        try:
            while True:
                time.sleep(0.1)
        except (KeyboardInterrupt, OSError):
            print("Keyboard interruption in main thread... closing server.")
            if self.server:
                self.server.close()
            for tunnel in CURRENT_TUNNELS:
                tunnel.kill()

    def attach_load_events(self):
        """Add a load event for every component whose initial value should be randomized."""
        if Context.root_block:
            for component in Context.root_block.blocks.values():
                if (
                    isinstance(component, components.IOComponent)
                    and component.load_event_to_attach
                ):
                    load_fn, every = component.load_event_to_attach
                    # Use set_event_trigger to avoid ambiguity between load class/instance method
                    dep = self.set_event_trigger(
                        "load",
                        load_fn,
                        None,
                        component,
                        no_target=True,
                        # If every is None, for sure skip the queue
                        # else, let the enable_queue parameter take precedence
                        # this will raise a nice error message if every is used
                        # without queue
                        queue=False if every is None else None,
                        every=every,
                    )[0]
                    component.load_event = dep

    def startup_events(self):
        """Events that should be run when the app containing this block starts up."""

        if self.enable_queue:
            utils.run_coro_in_background(self._queue.start, self.ssl_verify)
            # So that processing can resume in case the queue was stopped
            self._queue.stopped = False
        utils.run_coro_in_background(self.create_limiter)

    def queue_enabled_for_fn(self, fn_index: int):
        if self.dependencies[fn_index]["queue"] is None:
            return self.enable_queue
        return self.dependencies[fn_index]["queue"]
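
    # Illustrative usage sketch (not part of the library source): queue_enabled_for_fn()
    # resolves each event's queue setting against the app-wide default, so a single
    # event can opt out of the queue that demo.queue() enables globally.
    #
    #     import gradio as gr
    #     with gr.Blocks() as demo:
    #         txt, out = gr.Textbox(), gr.Textbox()
    #         btn = gr.Button("Run")
    #         btn.click(lambda x: x, txt, out, queue=False)  # this event skips the queue
    #     demo.queue()
    #     demo.launch()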