Path: blob/master/invest-robot-contest_TinkoffBotTwitch-main/venv/lib/python3.8/site-packages/pandas/io/gbq.py
""" Google BigQuery support """1from __future__ import annotations23from typing import (4TYPE_CHECKING,5Any,6)78from pandas.compat._optional import import_optional_dependency910if TYPE_CHECKING:11from pandas import DataFrame121314def _try_import():15# since pandas is a dependency of pandas-gbq16# we need to import on first use17msg = (18"pandas-gbq is required to load data from Google BigQuery. "19"See the docs: https://pandas-gbq.readthedocs.io."20)21pandas_gbq = import_optional_dependency("pandas_gbq", extra=msg)22return pandas_gbq232425def read_gbq(26query: str,27project_id: str | None = None,28index_col: str | None = None,29col_order: list[str] | None = None,30reauth: bool = False,31auth_local_webserver: bool = False,32dialect: str | None = None,33location: str | None = None,34configuration: dict[str, Any] | None = None,35credentials=None,36use_bqstorage_api: bool | None = None,37max_results: int | None = None,38progress_bar_type: str | None = None,39) -> DataFrame:40"""41Load data from Google BigQuery.4243This function requires the `pandas-gbq package44<https://pandas-gbq.readthedocs.io>`__.4546See the `How to authenticate with Google BigQuery47<https://pandas-gbq.readthedocs.io/en/latest/howto/authentication.html>`__48guide for authentication instructions.4950Parameters51----------52query : str53SQL-Like Query to return data values.54project_id : str, optional55Google BigQuery Account project ID. Optional when available from56the environment.57index_col : str, optional58Name of result column to use for index in results DataFrame.59col_order : list(str), optional60List of BigQuery column names in the desired order for results61DataFrame.62reauth : bool, default False63Force Google BigQuery to re-authenticate the user. This is useful64if multiple accounts are used.65auth_local_webserver : bool, default False66Use the `local webserver flow`_ instead of the `console flow`_67when getting user credentials.6869.. _local webserver flow:70https://google-auth-oauthlib.readthedocs.io/en/latest/reference/google_auth_oauthlib.flow.html#google_auth_oauthlib.flow.InstalledAppFlow.run_local_server71.. _console flow:72https://google-auth-oauthlib.readthedocs.io/en/latest/reference/google_auth_oauthlib.flow.html#google_auth_oauthlib.flow.InstalledAppFlow.run_console7374*New in version 0.2.0 of pandas-gbq*.75dialect : str, default 'legacy'76Note: The default value is changing to 'standard' in a future version.7778SQL syntax dialect to use. Value can be one of:7980``'legacy'``81Use BigQuery's legacy SQL dialect. For more information see82`BigQuery Legacy SQL Reference83<https://cloud.google.com/bigquery/docs/reference/legacy-sql>`__.84``'standard'``85Use BigQuery's standard SQL, which is86compliant with the SQL 2011 standard. For more information87see `BigQuery Standard SQL Reference88<https://cloud.google.com/bigquery/docs/reference/standard-sql/>`__.89location : str, optional90Location where the query job should run. See the `BigQuery locations91documentation92<https://cloud.google.com/bigquery/docs/dataset-locations>`__ for a93list of available locations. 
The location must match that of any94datasets used in the query.9596*New in version 0.5.0 of pandas-gbq*.97configuration : dict, optional98Query config parameters for job processing.99For example:100101configuration = {'query': {'useQueryCache': False}}102103For more information see `BigQuery REST API Reference104<https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query>`__.105credentials : google.auth.credentials.Credentials, optional106Credentials for accessing Google APIs. Use this parameter to override107default credentials, such as to use Compute Engine108:class:`google.auth.compute_engine.Credentials` or Service Account109:class:`google.oauth2.service_account.Credentials` directly.110111*New in version 0.8.0 of pandas-gbq*.112use_bqstorage_api : bool, default False113Use the `BigQuery Storage API114<https://cloud.google.com/bigquery/docs/reference/storage/>`__ to115download query results quickly, but at an increased cost. To use this116API, first `enable it in the Cloud Console117<https://console.cloud.google.com/apis/library/bigquerystorage.googleapis.com>`__.118You must also have the `bigquery.readsessions.create119<https://cloud.google.com/bigquery/docs/access-control#roles>`__120permission on the project you are billing queries to.121122This feature requires version 0.10.0 or later of the ``pandas-gbq``123package. It also requires the ``google-cloud-bigquery-storage`` and124``fastavro`` packages.125126.. versionadded:: 0.25.0127max_results : int, optional128If set, limit the maximum number of rows to fetch from the query129results.130131*New in version 0.12.0 of pandas-gbq*.132133.. versionadded:: 1.1.0134progress_bar_type : Optional, str135If set, use the `tqdm <https://tqdm.github.io/>`__ library to136display a progress bar while the data downloads. Install the137``tqdm`` package to use this feature.138139Possible values of ``progress_bar_type`` include:140141``None``142No progress bar.143``'tqdm'``144Use the :func:`tqdm.tqdm` function to print a progress bar145to :data:`sys.stderr`.146``'tqdm_notebook'``147Use the :func:`tqdm.tqdm_notebook` function to display a148progress bar as a Jupyter notebook widget.149``'tqdm_gui'``150Use the :func:`tqdm.tqdm_gui` function to display a151progress bar as a graphical dialog box.152153Note that this feature requires version 0.12.0 or later of the154``pandas-gbq`` package. And it requires the ``tqdm`` package. Slightly155different than ``pandas-gbq``, here the default is ``None``.156157.. versionadded:: 1.0.0158159Returns160-------161df: DataFrame162DataFrame representing results of query.163164See Also165--------166pandas_gbq.read_gbq : This function in the pandas-gbq library.167DataFrame.to_gbq : Write a DataFrame to Google BigQuery.168"""169pandas_gbq = _try_import()170171kwargs: dict[str, str | bool | int | None] = {}172173# START: new kwargs. 
Don't populate unless explicitly set.174if use_bqstorage_api is not None:175kwargs["use_bqstorage_api"] = use_bqstorage_api176if max_results is not None:177kwargs["max_results"] = max_results178179kwargs["progress_bar_type"] = progress_bar_type180# END: new kwargs181182return pandas_gbq.read_gbq(183query,184project_id=project_id,185index_col=index_col,186col_order=col_order,187reauth=reauth,188auth_local_webserver=auth_local_webserver,189dialect=dialect,190location=location,191configuration=configuration,192credentials=credentials,193**kwargs,194)195196197def to_gbq(198dataframe: DataFrame,199destination_table: str,200project_id: str | None = None,201chunksize: int | None = None,202reauth: bool = False,203if_exists: str = "fail",204auth_local_webserver: bool = False,205table_schema: list[dict[str, str]] | None = None,206location: str | None = None,207progress_bar: bool = True,208credentials=None,209) -> None:210pandas_gbq = _try_import()211pandas_gbq.to_gbq(212dataframe,213destination_table,214project_id=project_id,215chunksize=chunksize,216reauth=reauth,217if_exists=if_exists,218auth_local_webserver=auth_local_webserver,219table_schema=table_schema,220location=location,221progress_bar=progress_bar,222credentials=credentials,223)224225226
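# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the module above): a minimal
# example of reading query results through the read_gbq wrapper, exercising
# the dialect, configuration and progress_bar_type options described in the
# docstring. The project ID is a hypothetical placeholder and the query
# targets a public BigQuery sample dataset; running it requires the optional
# pandas-gbq dependency plus valid Google Cloud credentials, so it is left
# commented out.
#
#   import pandas as pd
#
#   df = pd.read_gbq(
#       "SELECT name, SUM(number) AS total "
#       "FROM `bigquery-public-data.usa_names.usa_1910_2013` "
#       "GROUP BY name",
#       project_id="my-project",                            # placeholder project ID
#       dialect="standard",                                 # use standard SQL
#       configuration={"query": {"useQueryCache": False}},  # job config, as in the docstring
#       progress_bar_type="tqdm",                           # needs the tqdm package
#   )
# ---------------------------------------------------------------------------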
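# Companion sketch for the write path (also illustrative and commented out):
# DataFrame.to_gbq routes through the to_gbq wrapper above. The destination
# table, project ID and schema are hypothetical placeholders.
#
#   df.to_gbq(
#       "my_dataset.my_table",        # placeholder destination table
#       project_id="my-project",      # placeholder project ID
#       if_exists="replace",          # replace the table if it already exists
#       table_schema=[
#           {"name": "name", "type": "STRING"},
#           {"name": "total", "type": "INTEGER"},
#       ],
#   )
# ---------------------------------------------------------------------------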