<!DOCTYPE html>
<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>airflow.contrib.hooks.bigquery_hook &mdash; Airflow Documentation</title>
<script type="text/javascript" src="../../../../../_static/js/modernizr.min.js"></script>
<script type="text/javascript" id="documentation_options" data-url_root="../../../../../" src="../../../../../_static/documentation_options.js"></script>
<script type="text/javascript" src="../../../../../_static/jquery.js"></script>
<script type="text/javascript" src="../../../../../_static/underscore.js"></script>
<script type="text/javascript" src="../../../../../_static/doctools.js"></script>
<script type="text/javascript" src="../../../../../_static/language_data.js"></script>
<script type="text/javascript" src="../../../../../_static/js/theme.js"></script>
<link rel="stylesheet" href="../../../../../_static/css/theme.css" type="text/css" />
<link rel="stylesheet" href="../../../../../_static/pygments.css" type="text/css" />
<link rel="index" title="Index" href="../../../../../genindex.html" />
<link rel="search" title="Search" href="../../../../../search.html" />
<link rel="next" title="airflow.contrib.hooks.cassandra_hook" href="../cassandra_hook/index.html" />
<link rel="prev" title="airflow.contrib.hooks.azure_fileshare_hook" href="../azure_fileshare_hook/index.html" />
<script>
document.addEventListener('DOMContentLoaded', function() {
var el = document.getElementById('changelog');
if (el !== null ) {
// [AIRFLOW-...]
el.innerHTML = el.innerHTML.replace(
/\[(AIRFLOW-[\d]+)\]/g,
`<a href="https://issues.apache.org/jira/browse/$1">[$1]</a>`
);
// (#...)
el.innerHTML = el.innerHTML.replace(
/\(#([\d]+)\)/g,
`<a href="https://github.com/apache/airflow/pull/$1">(#$1)</a>`
);
}
});
</script>
<style>
.example-header {
position: relative;
background: #9AAA7A;
padding: 8px 16px;
margin-bottom: 0;
}
.example-header--with-button {
padding-right: 166px;
}
.example-header:after{
content: '';
display: table;
clear: both;
}
.example-title {
display:block;
padding: 4px;
margin-right: 16px;
color: white;
overflow-x: auto;
}
.example-header-button {
top: 8px;
right: 16px;
position: absolute;
}
.example-header + .highlight-python {
margin-top: 0 !important;
}
.viewcode-button {
display: inline-block;
padding: 8px 16px;
border: 0;
margin: 0;
outline: 0;
border-radius: 2px;
-webkit-box-shadow: 0 3px 5px 0 rgba(0,0,0,.3);
box-shadow: 0 3px 6px 0 rgba(0,0,0,.3);
color: #404040;
background-color: #e7e7e7;
cursor: pointer;
font-size: 16px;
font-weight: 500;
line-height: 1;
text-decoration: none;
text-overflow: ellipsis;
overflow: hidden;
text-transform: uppercase;
-webkit-transition: background-color .2s;
transition: background-color .2s;
vertical-align: middle;
white-space: nowrap;
}
.viewcode-button:visited {
color: #404040;
}
.viewcode-button:hover, .viewcode-button:focus {
color: #404040;
background-color: #d6d6d6;
}
</style>
</head>
<body class="wy-body-for-nav">
<div class="wy-grid-for-nav">
<nav data-toggle="wy-nav-shift" class="wy-nav-side">
<div class="wy-side-scroll">
<div class="wy-side-nav-search" >
<a href="../../../../../index.html" class="icon icon-home"> Airflow
</a>
<div class="version">
1.10.3
</div>
<div role="search">
<form id="rtd-search-form" class="wy-form" action="../../../../../search.html" method="get">
<input type="text" name="q" placeholder="Search docs" />
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
</div>
</div>
<div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
<ul class="current">
<li class="toctree-l1"><a class="reference internal" href="../../../../../project.html">Project</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../../license.html">License</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../../start.html">Quick Start</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../../installation.html">Installation</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../../tutorial.html">Tutorial</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../../howto/index.html">How-to Guides</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../../ui.html">UI / Screenshots</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../../concepts.html">Concepts</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../../profiling.html">Data Profiling</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../../cli.html">Command Line Interface</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../../scheduler.html">Scheduling &amp; Triggers</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../../plugins.html">Plugins</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../../security.html">Security</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../../timezone.html">Time zones</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../../api.html">Experimental Rest API</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../../integration.html">Integration</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../../metrics.html">Metrics</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../../kubernetes.html">Kubernetes</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../../lineage.html">Lineage</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../../changelog.html">Changelog</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../../faq.html">FAQ</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../../../macros.html">Macros reference</a></li>
<li class="toctree-l1 current"><a class="reference internal" href="../../../../index.html">API Reference</a><ul class="current">
<li class="toctree-l2"><a class="reference internal" href="../../../../index.html#operators">Operators</a></li>
<li class="toctree-l2 current"><a class="reference internal" href="../../../../index.html#hooks">Hooks</a><ul class="current">
<li class="toctree-l3 current"><a class="reference internal" href="../../../../index.html#hooks-packages">Hooks packages</a><ul class="current">
<li class="toctree-l4"><a class="reference internal" href="../../../hooks/index.html"><code class="xref py py-mod docutils literal notranslate"><span class="pre">airflow.hooks</span></code></a></li>
<li class="toctree-l4 current"><a class="reference internal" href="../index.html"><code class="xref py py-mod docutils literal notranslate"><span class="pre">airflow.contrib.hooks</span></code></a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../../index.html#executors">Executors</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../index.html#models">Models</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../index.html#core-and-community-package">Core and community package</a></li>
</ul>
</li>
</ul>
</div>
</div>
</nav>
<section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
<nav class="wy-nav-top" aria-label="top navigation">
<i data-toggle="wy-nav-top" class="fa fa-bars"></i>
<a href="../../../../../index.html">Airflow</a>
</nav>
<div class="wy-nav-content">
<div class="rst-content">
<div role="navigation" aria-label="breadcrumbs navigation">
<ul class="wy-breadcrumbs">
<li><a href="../../../../../index.html">Docs</a> &raquo;</li>
<li><a href="../../../../index.html">API Reference</a> &raquo;</li>
<li><a href="../index.html"><code class="xref py py-mod docutils literal notranslate"><span class="pre">airflow.contrib.hooks</span></code></a> &raquo;</li>
<li><code class="xref py py-mod docutils literal notranslate"><span class="pre">airflow.contrib.hooks.bigquery_hook</span></code></li>
<li class="wy-breadcrumbs-aside">
<a href="../../../../../_sources/_api/airflow/contrib/hooks/bigquery_hook/index.rst.txt" rel="nofollow"> View page source</a>
</li>
</ul>
<hr/>
</div>
<div role="main" class="document" itemscope="itemscope" itemtype="http://schema.org/Article">
<div itemprop="articleBody">
<div class="section" id="module-airflow.contrib.hooks.bigquery_hook">
<span id="airflow-contrib-hooks-bigquery-hook"></span><h1><a class="reference internal" href="#module-airflow.contrib.hooks.bigquery_hook" title="airflow.contrib.hooks.bigquery_hook"><code class="xref py py-mod docutils literal notranslate"><span class="pre">airflow.contrib.hooks.bigquery_hook</span></code></a><a class="headerlink" href="#module-airflow.contrib.hooks.bigquery_hook" title="Permalink to this headline"></a></h1>
<p>This module contains a BigQuery Hook, as well as a very basic PEP 249
implementation for BigQuery.</p>
<div class="section" id="module-contents">
<h2>Module Contents<a class="headerlink" href="#module-contents" title="Permalink to this headline"></a></h2>
<dl class="class">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryHook">
<em class="property">class </em><code class="descclassname">airflow.contrib.hooks.bigquery_hook.</code><code class="descname">BigQueryHook</code><span class="sig-paren">(</span><em>bigquery_conn_id='bigquery_default'</em>, <em>delegate_to=None</em>, <em>use_legacy_sql=True</em>, <em>location=None</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryHook"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryHook" title="Permalink to this definition"></a></dt>
<dd><p>Bases:<a class="reference internal" href="../gcp_api_base_hook/index.html#airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook" title="airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook</span></code></a>, <a class="reference internal" href="../../../hooks/dbapi_hook/index.html#airflow.hooks.dbapi_hook.DbApiHook" title="airflow.hooks.dbapi_hook.DbApiHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.hooks.dbapi_hook.DbApiHook</span></code></a></p>
<p>Interact with BigQuery. This hook uses the Google Cloud Platform
connection.</p>
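<p>A minimal usage sketch, assuming a GCP connection named <code class="docutils literal notranslate"><span class="pre">bigquery_default</span></code> has been configured in Airflow:</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from airflow.contrib.hooks.bigquery_hook import BigQueryHook

hook = BigQueryHook(bigquery_conn_id='bigquery_default',
                    use_legacy_sql=False)
conn = hook.get_conn()        # PEP 249-style connection (see get_conn below)
service = hook.get_service()  # raw BigQuery service object (see get_service below)
</pre></div>
</div>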
<dl class="attribute">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryHook.conn_name_attr">
<code class="descname">conn_name_attr</code><em class="property"> = bigquery_conn_id</em><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryHook.conn_name_attr"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryHook.conn_name_attr" title="Permalink to this definition"></a></dt>
<dd></dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryHook.get_conn">
<code class="descname">get_conn</code><span class="sig-paren">(</span><em>self</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryHook.get_conn"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryHook.get_conn" title="Permalink to this definition"></a></dt>
<dd><p>Returns a BigQuery PEP 249 connection object.</p>
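<p>The returned connection can be used in the usual PEP 249 style; a minimal sketch (the query is a placeholder):</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>hook = BigQueryHook(bigquery_conn_id='bigquery_default')
conn = hook.get_conn()
cursor = conn.cursor()
cursor.execute('SELECT 1')
rows = cursor.fetchall()
</pre></div>
</div>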
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryHook.get_service">
<code class="descname">get_service</code><span class="sig-paren">(</span><em>self</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryHook.get_service"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryHook.get_service" title="Permalink to this definition"></a></dt>
<dd><p>Returns a BigQuery service object.</p>
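<p>The service object is the <code class="docutils literal notranslate"><span class="pre">google-api-python-client</span></code> BigQuery v2 client, so it can be used for direct API calls; a hedged sketch (project, dataset and table names are placeholders):</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>hook = BigQueryHook(bigquery_conn_id='bigquery_default')
service = hook.get_service()
# Fetch raw table metadata through the BigQuery v2 REST API
table = service.tables().get(projectId='my-project',
                             datasetId='my_dataset',
                             tableId='employees').execute()
</pre></div>
</div>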
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryHook.insert_rows">
<code class="descname">insert_rows</code><span class="sig-paren">(</span><em>self</em>, <em>table</em>, <em>rows</em>, <em>target_fields=None</em>, <em>commit_every=1000</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryHook.insert_rows"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryHook.insert_rows" title="Permalink to this definition"></a></dt>
<dd><p>Insertion is currently unsupported. Theoretically, you could use
BigQuery’s streaming API to insert rows into a table, but this hasn’t
been implemented.</p>
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryHook.get_pandas_df">
<code class="descname">get_pandas_df</code><span class="sig-paren">(</span><em>self</em>, <em>sql</em>, <em>parameters=None</em>, <em>dialect=None</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryHook.get_pandas_df"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryHook.get_pandas_df" title="Permalink to this definition"></a></dt>
<dd><p>Returns a Pandas DataFrame for the results produced by a BigQuery
query. The DbApiHook method must be overridden because Pandas
doesn’t support PEP 249 connections, except for SQLite. See:</p>
<p><a class="reference external" href="https://github.com/pydata/pandas/blob/master/pandas/io/sql.py#L447">https://github.com/pydata/pandas/blob/master/pandas/io/sql.py#L447</a>
<a class="reference external" href="https://github.com/pydata/pandas/issues/6900">https://github.com/pydata/pandas/issues/6900</a></p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><ul class="simple">
<li><p><strong>sql</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The BigQuery SQL to execute.</p></li>
<li><p><strong>parameters</strong> (<em>mapping</em><em> or </em><em>iterable</em>) – The parameters to render the SQL query with (not
used; present only to match the superclass method signature)</p></li>
<li><p><strong>dialect</strong> (<em>str in {'legacy'</em><em>, </em><em>'standard'}</em>) – Dialect of BigQuery SQL – legacy SQL or standard SQL.
Defaults to <cite>self.use_legacy_sql</cite> if not specified.</p></li>
</ul>
</dd>
</dl>
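<p>A minimal sketch of fetching query results into a DataFrame (assuming a configured <code class="docutils literal notranslate"><span class="pre">bigquery_default</span></code> connection; the dataset and table are placeholders):</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>hook = BigQueryHook(bigquery_conn_id='bigquery_default')

# dialect overrides the hook-level use_legacy_sql setting for this call
df = hook.get_pandas_df(
    sql='SELECT name, salary FROM `my-project.my_dataset.employees` LIMIT 10',
    dialect='standard',
)
print(df.head())
</pre></div>
</div>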
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryHook.table_exists">
<code class="descname">table_exists</code><span class="sig-paren">(</span><em>self</em>, <em>project_id</em>, <em>dataset_id</em>, <em>table_id</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryHook.table_exists"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryHook.table_exists" title="Permalink to this definition"></a></dt>
<dd><p>Checks for the existence of a table in Google BigQuery.</p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><ul class="simple">
<li><p><strong>project_id</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The Google Cloud project in which to look for the
table. The connection supplied to the hook must provide access to
the specified project.</p></li>
<li><p><strong>dataset_id</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The name of the dataset in which to look for the
table.</p></li>
<li><p><strong>table_id</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The name of the table to check the existence of.</p></li>
</ul>
</dd>
</dl>
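<p>A short usage sketch (the project, dataset and table names below are placeholders):</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>hook = BigQueryHook(bigquery_conn_id='bigquery_default')

if not hook.table_exists(project_id='my-project',
                         dataset_id='my_dataset',
                         table_id='employees'):
    raise ValueError('Expected table my_dataset.employees is missing')
</pre></div>
</div>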
</dd></dl>
</dd></dl>
<dl class="class">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryPandasConnector">
<em class="property">class </em><code class="descclassname">airflow.contrib.hooks.bigquery_hook.</code><code class="descname">BigQueryPandasConnector</code><span class="sig-paren">(</span><em>project_id</em>, <em>service</em>, <em>reauth=False</em>, <em>verbose=False</em>, <em>dialect='legacy'</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryPandasConnector"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryPandasConnector" title="Permalink to this definition"></a></dt>
<dd><p>Bases:<code class="xref py py-class docutils literal notranslate"><span class="pre">pandas_gbq.gbq.GbqConnector</span></code></p>
<p>This connector behaves identically to GbqConnector (from Pandas), except
that it allows the service to be injected, and disables a call to
self.get_credentials(). This allows Airflow to use BigQuery with Pandas
without forcing a three legged OAuth connection. Instead, we can inject
service account credentials into the binding.</p>
</dd></dl>
<dl class="class">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryConnection">
<em class="property">class </em><code class="descclassname">airflow.contrib.hooks.bigquery_hook.</code><code class="descname">BigQueryConnection</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryConnection"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryConnection" title="Permalink to this definition"></a></dt>
<dd><p>Bases:<a class="reference external" href="https://docs.python.org/3/library/functions.html#object" title="(in Python v3.7)"><code class="xref py py-class docutils literal notranslate"><span class="pre">object</span></code></a></p>
<p>BigQuery does not have a notion of a persistent connection. Thus, these
objects are small stateless factories for cursors, which do all the real
work.</p>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryConnection.close">
<code class="descname">close</code><span class="sig-paren">(</span><em>self</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryConnection.close"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryConnection.close" title="Permalink to this definition"></a></dt>
<dd><p>BigQueryConnection does not have anything to close.</p>
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryConnection.commit">
<code class="descname">commit</code><span class="sig-paren">(</span><em>self</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryConnection.commit"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryConnection.commit" title="Permalink to this definition"></a></dt>
<dd><p>BigQueryConnection does not support transactions.</p>
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryConnection.cursor">
<code class="descname">cursor</code><span class="sig-paren">(</span><em>self</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryConnection.cursor"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryConnection.cursor" title="Permalink to this definition"></a></dt>
<dd><p>Return a new <code class="xref py py-class docutils literal notranslate"><span class="pre">Cursor</span></code> object using the connection.</p>
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryConnection.rollback">
<code class="descname">rollback</code><span class="sig-paren">(</span><em>self</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryConnection.rollback"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryConnection.rollback" title="Permalink to this definition"></a></dt>
<dd></dd></dl>
</dd></dl>
<dl class="class">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor">
<em class="property">class </em><code class="descclassname">airflow.contrib.hooks.bigquery_hook.</code><code class="descname">BigQueryBaseCursor</code><span class="sig-paren">(</span><em>service</em>, <em>project_id</em>, <em>use_legacy_sql=True</em>, <em>api_resource_configs=None</em>, <em>location=None</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryBaseCursor"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor" title="Permalink to this definition"></a></dt>
<dd><p>Bases:<code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.utils.log.logging_mixin.LoggingMixin</span></code></p>
<p>The BigQuery base cursor contains helper methods to execute queries against
BigQuery. The methods can be used directly by operators, in cases where a
PEP 249 cursor isn’t needed.</p>
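<p>Operators typically obtain this cursor from the hook rather than constructing it directly; a minimal sketch, reused by the method examples below:</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>hook = BigQueryHook(bigquery_conn_id='bigquery_default', use_legacy_sql=False)

# The PEP 249 cursor returned here also inherits the run_* and
# create_* helper methods documented below.
cursor = hook.get_conn().cursor()
</pre></div>
</div>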
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.create_empty_table">
<code class="descname">create_empty_table</code><span class="sig-paren">(</span><em>self</em>, <em>project_id</em>, <em>dataset_id</em>, <em>table_id</em>, <em>schema_fields=None</em>, <em>time_partitioning=None</em>, <em>labels=None</em>, <em>view=None</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryBaseCursor.create_empty_table"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.create_empty_table" title="Permalink to this definition"></a></dt>
<dd><p>Creates a new, empty table in the dataset.
To create a view, which is defined by a SQL query, pass a dictionary to the ‘view’ kwarg.</p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><ul class="simple">
<li><p><strong>project_id</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The project to create the table into.</p></li>
<li><p><strong>dataset_id</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The dataset to create the table into.</p></li>
<li><p><strong>table_id</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The name of the table to be created.</p></li>
<li><p><strong>schema_fields</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#list" title="(in Python v3.7)"><em>list</em></a>) – If set, the schema field list as defined here:
<a class="reference external" href="https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.schema">https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.schema</a></p></li>
<li><p><strong>labels</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.7)"><em>dict</em></a>) – a dictionary containing labels for the table, passed to BigQuery</p></li>
</ul>
</dd>
</dl>
<p><strong>Example</strong>:</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span><span class="n">schema_fields</span><span class="o">=</span><span class="p">[{</span><span class="s2">&quot;name&quot;</span><span class="p">:</span> <span class="s2">&quot;emp_name&quot;</span><span class="p">,</span> <span class="s2">&quot;type&quot;</span><span class="p">:</span> <span class="s2">&quot;STRING&quot;</span><span class="p">,</span> <span class="s2">&quot;mode&quot;</span><span class="p">:</span> <span class="s2">&quot;REQUIRED&quot;</span><span class="p">},</span>
<span class="p">{</span><span class="s2">&quot;name&quot;</span><span class="p">:</span> <span class="s2">&quot;salary&quot;</span><span class="p">,</span> <span class="s2">&quot;type&quot;</span><span class="p">:</span> <span class="s2">&quot;INTEGER&quot;</span><span class="p">,</span> <span class="s2">&quot;mode&quot;</span><span class="p">:</span> <span class="s2">&quot;NULLABLE&quot;</span><span class="p">}]</span>
</pre></div>
</div>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><ul class="simple">
<li><p><strong>time_partitioning</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.7)"><em>dict</em></a>) – <p>configure optional time partitioning fields i.e.
partition by field, type and expiration as per API specifications.</p>
<div class="admonition seealso">
<p class="admonition-title">See also</p>
<p><a class="reference external" href="https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#timePartitioning">https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#timePartitioning</a></p>
</div>
</p></li>
<li><p><strong>view</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.7)"><em>dict</em></a>) – [Optional] A dictionary containing definition for the view.
If set, it will create a view instead of a table:
<a class="reference external" href="https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#view">https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#view</a></p></li>
</ul>
</dd>
</dl>
<p><strong>Example</strong>:</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span><span class="n">view</span> <span class="o">=</span> <span class="p">{</span>
<span class="s2">&quot;query&quot;</span><span class="p">:</span> <span class="s2">&quot;SELECT * FROM `test-project-id.test_dataset_id.test_table_prefix*` LIMIT 1000&quot;</span><span class="p">,</span>
<span class="s2">&quot;useLegacySql&quot;</span><span class="p">:</span> <span class="kc">False</span>
<span class="p">}</span>
</pre></div>
</div>
<dl class="field-list simple">
<dt class="field-odd">Returns</dt>
<dd class="field-odd"><p>None</p>
</dd>
</dl>
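<p>Putting the pieces together, a minimal call sketch using the cursor from the sketch above (project, dataset and table names are placeholders):</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>cursor.create_empty_table(
    project_id='my-project',
    dataset_id='my_dataset',
    table_id='employees',
    schema_fields=schema_fields,        # as in the schema_fields example above
    time_partitioning={'type': 'DAY'},  # optional daily partitioning
)
</pre></div>
</div>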
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.create_external_table">
<code class="descname">create_external_table</code><span class="sig-paren">(</span><em>self</em>, <em>external_project_dataset_table</em>, <em>schema_fields</em>, <em>source_uris</em>, <em>source_format='CSV'</em>, <em>autodetect=False</em>, <em>compression='NONE'</em>, <em>ignore_unknown_values=False</em>, <em>max_bad_records=0</em>, <em>skip_leading_rows=0</em>, <em>field_delimiter='</em>, <em>'</em>, <em>quote_character=None</em>, <em>allow_quoted_newlines=False</em>, <em>allow_jagged_rows=False</em>, <em>src_fmt_configs=None</em>, <em>labels=None</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryBaseCursor.create_external_table"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.create_external_table" title="Permalink to this definition"></a></dt>
<dd><p>Creates a new external table in the dataset with the data in Google
Cloud Storage. See here:</p>
<p><a class="reference external" href="https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#resource">https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#resource</a></p>
<p>for more details about these parameters.</p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><ul class="simple">
<li><p><strong>external_project_dataset_table</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The dotted <code class="docutils literal notranslate"><span class="pre">(&lt;project&gt;.|&lt;project&gt;:)&lt;dataset&gt;.&lt;table&gt;($&lt;partition&gt;)</span></code> BigQuery
table name of the external table to create.
If <code class="docutils literal notranslate"><span class="pre">&lt;project&gt;</span></code> is not included, project will be the
project defined in the connection json.</p></li>
<li><p><strong>schema_fields</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#list" title="(in Python v3.7)"><em>list</em></a>) – The schema field list as defined here:
<a class="reference external" href="https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#resource">https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#resource</a></p></li>
<li><p><strong>source_uris</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#list" title="(in Python v3.7)"><em>list</em></a>) – The source Google Cloud
Storage URI (e.g. gs://some-bucket/some-file.txt). A single wildcard
per object name can be used.</p></li>
<li><p><strong>source_format</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – File format to export.</p></li>
<li><p><strong>autodetect</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#bool" title="(in Python v3.7)"><em>bool</em></a>) – Try to detect schema and format options automatically.
Any option specified explicitly will be honored.</p></li>
<li><p><strong>compression</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – [Optional] The compression type of the data source.
Possible values include GZIP and NONE.
The default value is NONE.
This setting is ignored for Google Cloud Bigtable,
Google Cloud Datastore backups and Avro formats.</p></li>
<li><p><strong>ignore_unknown_values</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#bool" title="(in Python v3.7)"><em>bool</em></a>) – [Optional] Indicates if BigQuery should allow
extra values that are not represented in the table schema.
If true, the extra values are ignored. If false, records with extra columns
are treated as bad records, and if there are too many bad records, an
invalid error is returned in the job result.</p></li>
<li><p><strong>max_bad_records</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#int" title="(in Python v3.7)"><em>int</em></a>) – The maximum number of bad records that BigQuery can
ignore when running the job.</p></li>
<li><p><strong>skip_leading_rows</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#int" title="(in Python v3.7)"><em>int</em></a>) – Number of rows to skip when loading from a CSV.</p></li>
<li><p><strong>field_delimiter</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The delimiter to use when loading from a CSV.</p></li>
<li><p><strong>quote_character</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The value that is used to quote data sections in a CSV
file.</p></li>
<li><p><strong>allow_quoted_newlines</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#bool" title="(in Python v3.7)"><em>bool</em></a>) – Whether to allow quoted newlines (true) or not
(false).</p></li>
<li><p><strong>allow_jagged_rows</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#bool" title="(in Python v3.7)"><em>bool</em></a>) – Accept rows that are missing trailing optional columns.
The missing values are treated as nulls. If false, records with missing
trailing columns are treated as bad records, and if there are too many bad
records, an invalid error is returned in the job result. Only applicable when
source_format is CSV.</p></li>
<li><p><strong>src_fmt_configs</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.7)"><em>dict</em></a>) – configure optional fields specific to the source format</p></li>
<li><p><strong>labels</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.7)"><em>dict</em></a>) – a dictionary containing labels for the table, passed to BigQuery</p></li>
</ul>
</dd>
</dl>
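<p>A hedged usage sketch with the cursor obtained above (bucket, dataset and table names are placeholders):</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>cursor.create_external_table(
    external_project_dataset_table='my-project.my_dataset.external_employees',
    schema_fields=[{'name': 'emp_name', 'type': 'STRING', 'mode': 'REQUIRED'},
                   {'name': 'salary', 'type': 'INTEGER', 'mode': 'NULLABLE'}],
    source_uris=['gs://my-bucket/employees/*.csv'],
    source_format='CSV',
    skip_leading_rows=1,
)
</pre></div>
</div>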
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.patch_table">
<code class="descname">patch_table</code><span class="sig-paren">(</span><em>self</em>, <em>dataset_id</em>, <em>table_id</em>, <em>project_id=None</em>, <em>description=None</em>, <em>expiration_time=None</em>, <em>external_data_configuration=None</em>, <em>friendly_name=None</em>, <em>labels=None</em>, <em>schema=None</em>, <em>time_partitioning=None</em>, <em>view=None</em>, <em>require_partition_filter=None</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryBaseCursor.patch_table"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.patch_table" title="Permalink to this definition"></a></dt>
<dd><p>Patch information in an existing table.
It only updates fields that are provided in the request object.</p>
<p>Reference: <a class="reference external" href="https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/patch">https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/patch</a></p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><ul class="simple">
<li><p><strong>dataset_id</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The dataset containing the table to be patched.</p></li>
<li><p><strong>table_id</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The name of the table to be patched.</p></li>
<li><p><strong>project_id</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The project containing the table to be patched.</p></li>
<li><p><strong>description</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – [Optional] A user-friendly description of this table.</p></li>
<li><p><strong>expiration_time</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#int" title="(in Python v3.7)"><em>int</em></a>) – [Optional] The time when this table expires,
in milliseconds since the epoch.</p></li>
<li><p><strong>external_data_configuration</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.7)"><em>dict</em></a>) – [Optional] A dictionary containing
properties of a table stored outside of BigQuery.</p></li>
<li><p><strong>friendly_name</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – [Optional] A descriptive name for this table.</p></li>
<li><p><strong>labels</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.7)"><em>dict</em></a>) – [Optional] A dictionary containing labels associated with this table.</p></li>
<li><p><strong>schema</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#list" title="(in Python v3.7)"><em>list</em></a>) – <p>[Optional] If set, the schema field list as defined here:
<a class="reference external" href="https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.schema">https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.schema</a>
The supported and unsupported schema modifications are listed here:
<a class="reference external" href="https://cloud.google.com/bigquery/docs/managing-table-schemas">https://cloud.google.com/bigquery/docs/managing-table-schemas</a>
<strong>Example</strong>:</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span><span class="n">schema</span><span class="o">=</span><span class="p">[{</span><span class="s2">&quot;name&quot;</span><span class="p">:</span> <span class="s2">&quot;emp_name&quot;</span><span class="p">,</span> <span class="s2">&quot;type&quot;</span><span class="p">:</span> <span class="s2">&quot;STRING&quot;</span><span class="p">,</span> <span class="s2">&quot;mode&quot;</span><span class="p">:</span> <span class="s2">&quot;REQUIRED&quot;</span><span class="p">},</span>
<span class="p">{</span><span class="s2">&quot;name&quot;</span><span class="p">:</span> <span class="s2">&quot;salary&quot;</span><span class="p">,</span> <span class="s2">&quot;type&quot;</span><span class="p">:</span> <span class="s2">&quot;INTEGER&quot;</span><span class="p">,</span> <span class="s2">&quot;mode&quot;</span><span class="p">:</span> <span class="s2">&quot;NULLABLE&quot;</span><span class="p">}]</span>
</pre></div>
</div>
</p></li>
<li><p><strong>time_partitioning</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.7)"><em>dict</em></a>) – [Optional] A dictionary containing time-based partitioning
definition for the table.</p></li>
<li><p><strong>view</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.7)"><em>dict</em></a>) – <p>[Optional] A dictionary containing definition for the view.
If set, it will patch a view instead of a table:
<a class="reference external" href="https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#view">https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#view</a>
<strong>Example</strong>:</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span><span class="n">view</span> <span class="o">=</span> <span class="p">{</span>
<span class="s2">&quot;query&quot;</span><span class="p">:</span> <span class="s2">&quot;SELECT * FROM `test-project-id.test_dataset_id.test_table_prefix*` LIMIT 500&quot;</span><span class="p">,</span>
<span class="s2">&quot;useLegacySql&quot;</span><span class="p">:</span> <span class="kc">False</span>
<span class="p">}</span>
</pre></div>
</div>
</p></li>
<li><p><strong>require_partition_filter</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#bool" title="(in Python v3.7)"><em>bool</em></a>) – [Optional] If true, queries over this table require a
partition filter. If false, queries over the table can run without one.</p></li>
</ul>
</dd>
</dl>
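<p>For example, a sketch that updates only the description and expiration of an existing table, using the cursor obtained above (values are placeholders):</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>cursor.patch_table(
    dataset_id='my_dataset',
    table_id='employees',
    description='Employee salaries, refreshed daily',
    expiration_time=1564615180000,  # milliseconds since the epoch
)
</pre></div>
</div>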
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.run_query">
<code class="descname">run_query</code><span class="sig-paren">(</span><em>self</em>, <em>bql=None</em>, <em>sql=None</em>, <em>destination_dataset_table=None</em>, <em>write_disposition='WRITE_EMPTY'</em>, <em>allow_large_results=False</em>, <em>flatten_results=None</em>, <em>udf_config=None</em>, <em>use_legacy_sql=None</em>, <em>maximum_billing_tier=None</em>, <em>maximum_bytes_billed=None</em>, <em>create_disposition='CREATE_IF_NEEDED'</em>, <em>query_params=None</em>, <em>labels=None</em>, <em>schema_update_options=()</em>, <em>priority='INTERACTIVE'</em>, <em>time_partitioning=None</em>, <em>api_resource_configs=None</em>, <em>cluster_fields=None</em>, <em>location=None</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryBaseCursor.run_query"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.run_query" title="Permalink to this definition"></a></dt>
<dd><p>Executes a BigQuery SQL query. Optionally persists results in a BigQuery
table. See here:</p>
<p><a class="reference external" href="https://cloud.google.com/bigquery/docs/reference/v2/jobs">https://cloud.google.com/bigquery/docs/reference/v2/jobs</a></p>
<p>for more details about these parameters.</p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><ul class="simple">
<li><p><strong>bql</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – (Deprecated. Use <cite>sql</cite> parameter instead) The BigQuery SQL
to execute.</p></li>
<li><p><strong>sql</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The BigQuery SQL to execute.</p></li>
<li><p><strong>destination_dataset_table</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The dotted <code class="docutils literal notranslate"><span class="pre">&lt;dataset&gt;.&lt;table&gt;</span></code>
BigQuery table to save the query results.</p></li>
<li><p><strong>write_disposition</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – What to do if the table already exists in
BigQuery.</p></li>
<li><p><strong>allow_large_results</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#bool" title="(in Python v3.7)"><em>bool</em></a>) – Whether to allow large results.</p></li>
<li><p><strong>flatten_results</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#bool" title="(in Python v3.7)"><em>bool</em></a>) – If true and query uses legacy SQL dialect, flattens
all nested and repeated fields in the query results. <code class="docutils literal notranslate"><span class="pre">allowLargeResults</span></code>
must be true if this is set to false. For standard SQL queries, this
flag is ignored and results are never flattened.</p></li>
<li><p><strong>udf_config</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#list" title="(in Python v3.7)"><em>list</em></a>) – The User Defined Function configuration for the query.
See <a class="reference external" href="https://cloud.google.com/bigquery/user-defined-functions">https://cloud.google.com/bigquery/user-defined-functions</a> for details.</p></li>
<li><p><strong>use_legacy_sql</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#bool" title="(in Python v3.7)"><em>bool</em></a>) – Whether to use legacy SQL (true) or standard SQL (false).
If <cite>None</cite>, defaults to <cite>self.use_legacy_sql</cite>.</p></li>
<li><p><strong>api_resource_configs</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.7)"><em>dict</em></a>) – a dictionary of params to apply to the ‘configuration’
section of the Google BigQuery Jobs API:
<a class="reference external" href="https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs">https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs</a>,
for example, {‘query’: {‘useQueryCache’: False}}. You can use it to
provide params that are not supported as regular
BigQueryHook arguments.</p></li>
<li><p><strong>maximum_billing_tier</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#int" title="(in Python v3.7)"><em>int</em></a>) – Positive integer that serves as a
multiplier of the basic price.</p></li>
<li><p><strong>maximum_bytes_billed</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#float" title="(in Python v3.7)"><em>float</em></a>) – Limits the bytes billed for this job.
Queries that will have bytes billed beyond this limit will fail
(without incurring a charge). If unspecified, this will be
set to your project default.</p></li>
<li><p><strong>create_disposition</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – Specifies whether the job is allowed to
create new tables.</p></li>
<li><p><strong>query_params</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#list" title="(in Python v3.7)"><em>list</em></a>) – a list of dictionary containing query parameter types and
values, passed to BigQuery</p></li>
<li><p><strong>labels</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.7)"><em>dict</em></a>) – a dictionary containing labels for the job/query,
passed to BigQuery</p></li>
<li><p><strong>schema_update_options</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#tuple" title="(in Python v3.7)"><em>tuple</em></a>) – Allows the schema of the destination
table to be updated as a side effect of the query job.</p></li>
<li><p><strong>priority</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – Specifies a priority for the query.
Possible values include INTERACTIVE and BATCH.
The default value is INTERACTIVE.</p></li>
<li><p><strong>time_partitioning</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.7)"><em>dict</em></a>) – configure optional time partitioning fields i.e.
partition by field, type and expiration as per API specifications.</p></li>
<li><p><strong>cluster_fields</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#list" title="(in Python v3.7)"><em>list</em></a><em>[</em><a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a><em>]</em>) – Request that the result of this query be stored sorted
by one or more columns. This is only available in combination with
time_partitioning. The order of columns given determines the sort order.</p></li>
<li><p><strong>location</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The geographic location of the job. Required except for
US and EU. See details at
<a class="reference external" href="https://cloud.google.com/bigquery/docs/locations#specifying_your_location">https://cloud.google.com/bigquery/docs/locations#specifying_your_location</a></p></li>
</ul>
</dd>
</dl>
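<p>A minimal sketch of persisting query results to a table, using the cursor obtained above (dataset and table names are placeholders):</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>cursor.run_query(
    sql='SELECT emp_name, salary FROM my_dataset.employees WHERE salary IS NOT NULL',
    destination_dataset_table='my_dataset.employees_clean',
    write_disposition='WRITE_TRUNCATE',
    use_legacy_sql=False,
)
</pre></div>
</div>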
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.run_extract">
<code class="descname">run_extract</code><span class="sig-paren">(</span><em>self</em>, <em>source_project_dataset_table</em>, <em>destination_cloud_storage_uris</em>, <em>compression='NONE'</em>, <em>export_format='CSV'</em>, <em>field_delimiter='</em>, <em>'</em>, <em>print_header=True</em>, <em>labels=None</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryBaseCursor.run_extract"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.run_extract" title="Permalink to this definition"></a></dt>
<dd><p>Executes a BigQuery extract command to copy data from BigQuery to
Google Cloud Storage. See here:</p>
<p><a class="reference external" href="https://cloud.google.com/bigquery/docs/reference/v2/jobs">https://cloud.google.com/bigquery/docs/reference/v2/jobs</a></p>
<p>for more details about these parameters.</p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><ul class="simple">
<li><p><strong>source_project_dataset_table</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The dotted <code class="docutils literal notranslate"><span class="pre">&lt;dataset&gt;.&lt;table&gt;</span></code>
BigQuery table to use as the source data.</p></li>
<li><p><strong>destination_cloud_storage_uris</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#list" title="(in Python v3.7)"><em>list</em></a>) – The destination Google Cloud
Storage URI (e.g. gs://some-bucket/some-file.txt). Follows
convention defined here:
<a class="reference external" href="https://cloud.google.com/bigquery/exporting-data-from-bigquery#exportingmultiple">https://cloud.google.com/bigquery/exporting-data-from-bigquery#exportingmultiple</a></p></li>
<li><p><strong>compression</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – Type of compression to use.</p></li>
<li><p><strong>export_format</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – File format to export.</p></li>
<li><p><strong>field_delimiter</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The delimiter to use when extracting to a CSV.</p></li>
<li><p><strong>print_header</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#bool" title="(in Python v3.7)"><em>bool</em></a>) – Whether to print a header for a CSV file extract.</p></li>
<li><p><strong>labels</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.7)"><em>dict</em></a>) – a dictionary containing labels for the job/query,
passed to BigQuery</p></li>
</ul>
</dd>
</dl>
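<p>A minimal sketch of exporting a table to Cloud Storage, using the cursor obtained above (bucket and table names are placeholders):</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>cursor.run_extract(
    source_project_dataset_table='my_dataset.employees',
    destination_cloud_storage_uris=['gs://my-bucket/exports/employees-*.csv'],
    export_format='CSV',
    print_header=True,
)
</pre></div>
</div>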
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.run_copy">
<code class="descname">run_copy</code><span class="sig-paren">(</span><em>self</em>, <em>source_project_dataset_tables</em>, <em>destination_project_dataset_table</em>, <em>write_disposition='WRITE_EMPTY'</em>, <em>create_disposition='CREATE_IF_NEEDED'</em>, <em>labels=None</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryBaseCursor.run_copy"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.run_copy" title="Permalink to this definition"></a></dt>
<dd><p>Executes a BigQuery copy command to copy data from one BigQuery table
to another. See here:</p>
<p><a class="reference external" href="https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy">https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy</a></p>
<p>for more details about these parameters.</p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><ul class="simple">
<li><p><strong>source_project_dataset_tables</strong> (<em>list|string</em>) – One or more dotted
<code class="docutils literal notranslate"><span class="pre">(project:|project.)&lt;dataset&gt;.&lt;table&gt;</span></code>
BigQuery tables to use as the source data. Use a list if there are
multiple source tables.
If <code class="docutils literal notranslate"><span class="pre">&lt;project&gt;</span></code> is not included, project will be the project defined
in the connection json.</p></li>
<li><p><strong>destination_project_dataset_table</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The destination BigQuery
table. Format is: <code class="docutils literal notranslate"><span class="pre">(project:|project.)&lt;dataset&gt;.&lt;table&gt;</span></code></p></li>
<li><p><strong>write_disposition</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The write disposition if the table already exists.</p></li>
<li><p><strong>create_disposition</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The create disposition if the table doesn’t exist.</p></li>
<li><p><strong>labels</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.7)"><em>dict</em></a>) – a dictionary containing labels for the job/query,
passed to BigQuery</p></li>
</ul>
</dd>
</dl>
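<p>A minimal sketch of copying two source tables into one destination table, using the cursor obtained above (names are placeholders):</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>cursor.run_copy(
    source_project_dataset_tables=['my_dataset.employees_2018',
                                   'my_dataset.employees_2019'],
    destination_project_dataset_table='my-project.my_dataset.employees_all',
    write_disposition='WRITE_TRUNCATE',
)
</pre></div>
</div>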
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.run_load">
<code class="descname">run_load</code><span class="sig-paren">(</span><em>self</em>, <em>destination_project_dataset_table</em>, <em>source_uris</em>, <em>schema_fields=None</em>, <em>source_format='CSV'</em>, <em>create_disposition='CREATE_IF_NEEDED'</em>, <em>skip_leading_rows=0</em>, <em>write_disposition='WRITE_EMPTY'</em>, <em>field_delimiter='</em>, <em>'</em>, <em>max_bad_records=0</em>, <em>quote_character=None</em>, <em>ignore_unknown_values=False</em>, <em>allow_quoted_newlines=False</em>, <em>allow_jagged_rows=False</em>, <em>schema_update_options=()</em>, <em>src_fmt_configs=None</em>, <em>time_partitioning=None</em>, <em>cluster_fields=None</em>, <em>autodetect=False</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryBaseCursor.run_load"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.run_load" title="Permalink to this definition"></a></dt>
<dd><p>Executes a BigQuery load command to load data from Google Cloud Storage
to BigQuery. See here:</p>
<p><a class="reference external" href="https://cloud.google.com/bigquery/docs/reference/v2/jobs">https://cloud.google.com/bigquery/docs/reference/v2/jobs</a></p>
<p>for more details about these parameters.</p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><ul class="simple">
<li><p><strong>destination_project_dataset_table</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The dotted <code class="docutils literal notranslate"><span class="pre">(&lt;project&gt;.|&lt;project&gt;:)&lt;dataset&gt;.&lt;table&gt;($&lt;partition&gt;)</span></code> BigQuery
table to load data into. If <code class="docutils literal notranslate"><span class="pre">&lt;project&gt;</span></code> is not included, project will be the
project defined in the connection json. If a partition is specified the
operator will automatically append the data, create a new partition or create
a new DAY partitioned table.</p></li>
<li><p><strong>schema_fields</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#list" title="(in Python v3.7)"><em>list</em></a>) – The schema field list as defined here:
<a class="reference external" href="https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load">https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load</a>
Required if autodetect=False; optional if autodetect=True.</p></li>
<li><p><strong>autodetect</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#bool" title="(in Python v3.7)"><em>bool</em></a>) – Attempt to autodetect the schema for CSV and JSON
source files.</p></li>
<li><p><strong>source_uris</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#list" title="(in Python v3.7)"><em>list</em></a>) – The source Google Cloud
Storage URI (e.g. gs://some-bucket/some-file.txt). A single wildcard
per object name can be used.</p></li>
<li><p><strong>source_format</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – File format of the data to load.</p></li>
<li><p><strong>create_disposition</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The create disposition if the table doesn’t exist.</p></li>
<li><p><strong>skip_leading_rows</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#int" title="(in Python v3.7)"><em>int</em></a>) – Number of rows to skip when loading from a CSV.</p></li>
<li><p><strong>write_disposition</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The write disposition if the table already exists.</p></li>
<li><p><strong>field_delimiter</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The delimiter to use when loading from a CSV.</p></li>
<li><p><strong>max_bad_records</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#int" title="(in Python v3.7)"><em>int</em></a>) – The maximum number of bad records that BigQuery can
ignore when running the job.</p></li>
<li><p><strong>quote_character</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The value that is used to quote data sections in a CSV
file.</p></li>
<li><p><strong>ignore_unknown_values</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#bool" title="(in Python v3.7)"><em>bool</em></a>) – [Optional] Indicates if BigQuery should allow
extra values that are not represented in the table schema.
If true, the extra values are ignored. If false, records with extra columns
are treated as bad records, and if there are too many bad records, an
invalid error is returned in the job result.</p></li>
<li><p><strong>allow_quoted_newlines</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#bool" title="(in Python v3.7)"><em>bool</em></a>) – Whether to allow quoted newlines (true) or not
(false).</p></li>
<li><p><strong>allow_jagged_rows</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#bool" title="(in Python v3.7)"><em>bool</em></a>) – Accept rows that are missing trailing optional columns.
The missing values are treated as nulls. If false, records with missing
trailing columns are treated as bad records, and if there are too many bad
records, an invalid error is returned in the job result. Only applicable when
source_format is CSV.</p></li>
<li><p><strong>schema_update_options</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#tuple" title="(in Python v3.7)"><em>tuple</em></a>) – Allows the schema of the destination
table to be updated as a side effect of the load job.</p></li>
<li><p><strong>src_fmt_configs</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.7)"><em>dict</em></a>) – configure optional fields specific to the source format</p></li>
<li><p><strong>time_partitioning</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.7)"><em>dict</em></a>) – configure optional time partitioning fields i.e.
partition by field, type and expiration as per API specifications.</p></li>
<li><p><strong>cluster_fields</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#list" title="(in Python v3.7)"><em>list</em></a><em>[</em><a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a><em>]</em>) – Request that the result of this load be stored sorted
by one or more columns. This is only available in combination with
time_partitioning. The order of columns given determines the sort order.</p></li>
</ul>
</dd>
</dl>
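<p>A minimal usage sketch for <code class="docutils literal notranslate"><span class="pre">run_load</span></code>. The connection id, bucket, project, dataset and table names are placeholders, and the hook-to-cursor access pattern (via the hook's connection) is an assumption rather than part of this method's contract.</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from airflow.contrib.hooks.bigquery_hook import BigQueryHook

# Assumed access pattern: a configured 'bigquery_default' connection supplies
# credentials; get_conn().cursor() yields a cursor exposing run_load().
cursor = BigQueryHook(bigquery_conn_id='bigquery_default').get_conn().cursor()

# Append a CSV file from GCS to a placeholder table, skipping its header row.
cursor.run_load(
    destination_project_dataset_table='your-project.your_dataset.your_table',
    source_uris=['gs://your-bucket/path/data.csv'],
    schema_fields=[
        {'name': 'id', 'type': 'INTEGER', 'mode': 'REQUIRED'},
        {'name': 'name', 'type': 'STRING', 'mode': 'NULLABLE'},
    ],
    source_format='CSV',
    skip_leading_rows=1,
    write_disposition='WRITE_APPEND',
)
</pre></div></div>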
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.run_with_configuration">
<code class="descname">run_with_configuration</code><span class="sig-paren">(</span><em>self</em>, <em>configuration</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryBaseCursor.run_with_configuration"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.run_with_configuration" title="Permalink to this definition"></a></dt>
<dd><p>Executes a BigQuery SQL query. For more details about the
configuration parameter, see:</p>
<p><a class="reference external" href="https://cloud.google.com/bigquery/docs/reference/v2/jobs">https://cloud.google.com/bigquery/docs/reference/v2/jobs</a></p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><p><strong>configuration</strong> – The configuration parameter maps directly to
BigQuery’s configuration field in the job object. See
<a class="reference external" href="https://cloud.google.com/bigquery/docs/reference/v2/jobs">https://cloud.google.com/bigquery/docs/reference/v2/jobs</a> for
details.</p>
</dd>
</dl>
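<p>As an illustration, the sketch below submits a query job through <code class="docutils literal notranslate"><span class="pre">run_with_configuration</span></code>; the configuration dict follows the job configuration shape linked above, with placeholder project, dataset and table names, and the cursor setup is the same assumption as in the <code class="docutils literal notranslate"><span class="pre">run_load</span></code> sketch.</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from airflow.contrib.hooks.bigquery_hook import BigQueryHook

cursor = BigQueryHook(bigquery_conn_id='bigquery_default').get_conn().cursor()

# The dict maps directly to the 'configuration' field of the job resource;
# here it describes a standard-SQL query writing to a placeholder table.
configuration = {
    'query': {
        'query': 'SELECT name, COUNT(1) AS cnt '
                 'FROM your_dataset.your_table GROUP BY name',
        'useLegacySql': False,
        'destinationTable': {
            'projectId': 'your-project',
            'datasetId': 'your_dataset',
            'tableId': 'name_counts',
        },
        'writeDisposition': 'WRITE_TRUNCATE',
    }
}
cursor.run_with_configuration(configuration)
</pre></div></div>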
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.poll_job_complete">
<code class="descname">poll_job_complete</code><span class="sig-paren">(</span><em>self</em>, <em>job_id</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryBaseCursor.poll_job_complete"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.poll_job_complete" title="Permalink to this definition"></a></dt>
<dd></dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.cancel_query">
<code class="descname">cancel_query</code><span class="sig-paren">(</span><em>self</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryBaseCursor.cancel_query"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.cancel_query" title="Permalink to this definition"></a></dt>
<dd><p>Cancel all started queries that have not yet completed</p>
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.get_schema">
<code class="descname">get_schema</code><span class="sig-paren">(</span><em>self</em>, <em>dataset_id</em>, <em>table_id</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryBaseCursor.get_schema"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.get_schema" title="Permalink to this definition"></a></dt>
<dd><p>Get the schema for a given dataset.table.
see <a class="reference external" href="https://cloud.google.com/bigquery/docs/reference/v2/tables#resource">https://cloud.google.com/bigquery/docs/reference/v2/tables#resource</a></p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><ul class="simple">
<li><p><strong>dataset_id</strong> – the dataset ID of the requested table</p></li>
<li><p><strong>table_id</strong> – the table ID of the requested table</p></li>
</ul>
</dd>
<dt class="field-even">Returns</dt>
<dd class="field-even"><p>a table schema</p>
</dd>
</dl>
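<p>A short sketch of <code class="docutils literal notranslate"><span class="pre">get_schema</span></code>, assuming the returned schema follows the tables#resource format linked above (a dict with a <code class="docutils literal notranslate"><span class="pre">fields</span></code> list); the dataset and table names are placeholders and the cursor setup mirrors the earlier sketches.</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from airflow.contrib.hooks.bigquery_hook import BigQueryHook

cursor = BigQueryHook(bigquery_conn_id='bigquery_default').get_conn().cursor()

# Print each field name and type of a placeholder table's schema.
schema = cursor.get_schema(dataset_id='your_dataset', table_id='your_table')
for field in schema.get('fields', []):
    print(field['name'], field['type'])
</pre></div></div>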
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.get_tabledata">
<code class="descname">get_tabledata</code><span class="sig-paren">(</span><em>self</em>, <em>dataset_id</em>, <em>table_id</em>, <em>max_results=None</em>, <em>selected_fields=None</em>, <em>page_token=None</em>, <em>start_index=None</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryBaseCursor.get_tabledata"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.get_tabledata" title="Permalink to this definition"></a></dt>
<dd><p>Get the data of a given dataset.table and optionally with selected columns.
see <a class="reference external" href="https://cloud.google.com/bigquery/docs/reference/v2/tabledata/list">https://cloud.google.com/bigquery/docs/reference/v2/tabledata/list</a></p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><ul class="simple">
<li><p><strong>dataset_id</strong> – the dataset ID of the requested table.</p></li>
<li><p><strong>table_id</strong> – the table ID of the requested table.</p></li>
<li><p><strong>max_results</strong> – the maximum results to return.</p></li>
<li><p><strong>selected_fields</strong> – List of fields to return (comma-separated). If
unspecified, all fields are returned.</p></li>
<li><p><strong>page_token</strong> – page token, returned from a previous call,
identifying the result set.</p></li>
<li><p><strong>start_index</strong> – zero based index of the starting row to read.</p></li>
</ul>
</dd>
<dt class="field-even">Returns</dt>
<dd class="field-even"><p>map containing the requested rows.</p>
</dd>
</dl>
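<p>A hedged sketch of <code class="docutils literal notranslate"><span class="pre">get_tabledata</span></code>; the response is assumed to follow the tabledata.list shape linked above (rows under <code class="docutils literal notranslate"><span class="pre">rows</span></code>, each with an <code class="docutils literal notranslate"><span class="pre">f</span></code> list of cells), and all identifiers are placeholders.</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from airflow.contrib.hooks.bigquery_hook import BigQueryHook

cursor = BigQueryHook(bigquery_conn_id='bigquery_default').get_conn().cursor()

# Read at most 10 rows of two selected columns from a placeholder table.
page = cursor.get_tabledata(
    dataset_id='your_dataset',
    table_id='your_table',
    max_results=10,
    selected_fields='id,name',
)
for row in page.get('rows', []):
    # Each row carries its cell values in the 'f' list, as {'v': value}.
    print([cell.get('v') for cell in row['f']])
</pre></div></div>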
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.run_table_delete">
<code class="descname">run_table_delete</code><span class="sig-paren">(</span><em>self</em>, <em>deletion_dataset_table</em>, <em>ignore_if_missing=False</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryBaseCursor.run_table_delete"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.run_table_delete" title="Permalink to this definition"></a></dt>
<dd><p>Delete an existing table from the dataset;
if the table does not exist, an error is returned unless ignore_if_missing
is set to True.</p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><ul class="simple">
<li><p><strong>deletion_dataset_table</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – A dotted
<code class="docutils literal notranslate"><span class="pre">(&lt;project&gt;.|&lt;project&gt;:)&lt;dataset&gt;.&lt;table&gt;</span></code> that indicates which table
will be deleted.</p></li>
<li><p><strong>ignore_if_missing</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#bool" title="(in Python v3.7)"><em>bool</em></a>) – if True, then return success even if the
requested table does not exist.</p></li>
</ul>
</dd>
<dt class="field-even">Returns</dt>
<dd class="field-even"><p></p>
</dd>
</dl>
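<p>A minimal sketch of <code class="docutils literal notranslate"><span class="pre">run_table_delete</span></code> with a placeholder table; <code class="docutils literal notranslate"><span class="pre">ignore_if_missing=True</span></code> turns a missing table into a no-op instead of an error.</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from airflow.contrib.hooks.bigquery_hook import BigQueryHook

cursor = BigQueryHook(bigquery_conn_id='bigquery_default').get_conn().cursor()

# Drop a placeholder table; do not fail if it has already been removed.
cursor.run_table_delete(
    deletion_dataset_table='your-project.your_dataset.obsolete_table',
    ignore_if_missing=True,
)
</pre></div></div>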
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.run_table_upsert">
<code class="descname">run_table_upsert</code><span class="sig-paren">(</span><em>self</em>, <em>dataset_id</em>, <em>table_resource</em>, <em>project_id=None</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryBaseCursor.run_table_upsert"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.run_table_upsert" title="Permalink to this definition"></a></dt>
<dd><p>Creates a new, empty table in the dataset;
if the table already exists, the existing table is updated.
Since BigQuery does not natively allow table upserts, this is not an
atomic operation.</p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><ul class="simple">
<li><p><strong>dataset_id</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – the dataset to upsert the table into.</p></li>
<li><p><strong>table_resource</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.7)"><em>dict</em></a>) – a table resource. see
<a class="reference external" href="https://cloud.google.com/bigquery/docs/reference/v2/tables#resource">https://cloud.google.com/bigquery/docs/reference/v2/tables#resource</a></p></li>
<li><p><strong>project_id</strong> – the project to upsert the table into. If None,
project will be self.project_id.</p></li>
</ul>
</dd>
<dt class="field-even">Returns</dt>
<dd class="field-even"><p></p>
</dd>
</dl>
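<p>A sketch of <code class="docutils literal notranslate"><span class="pre">run_table_upsert</span></code> with a minimal, placeholder table resource; only <code class="docutils literal notranslate"><span class="pre">tableReference</span></code> and <code class="docutils literal notranslate"><span class="pre">schema</span></code> are filled in, following the tables#resource format linked above.</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from airflow.contrib.hooks.bigquery_hook import BigQueryHook

cursor = BigQueryHook(bigquery_conn_id='bigquery_default').get_conn().cursor()

# A minimal table resource: reference plus schema, everything else defaulted.
table_resource = {
    'tableReference': {
        'projectId': 'your-project',
        'datasetId': 'your_dataset',
        'tableId': 'your_table',
    },
    'schema': {
        'fields': [
            {'name': 'id', 'type': 'INTEGER', 'mode': 'REQUIRED'},
            {'name': 'name', 'type': 'STRING', 'mode': 'NULLABLE'},
        ],
    },
}
cursor.run_table_upsert(dataset_id='your_dataset', table_resource=table_resource)
</pre></div></div>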
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.run_grant_dataset_view_access">
<code class="descname">run_grant_dataset_view_access</code><span class="sig-paren">(</span><em>self</em>, <em>source_dataset</em>, <em>view_dataset</em>, <em>view_table</em>, <em>source_project=None</em>, <em>view_project=None</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryBaseCursor.run_grant_dataset_view_access"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.run_grant_dataset_view_access" title="Permalink to this definition"></a></dt>
<dd><p>Grant authorized view access of a dataset to a view table.
If this view has already been granted access to the dataset, do nothing.
This method is not atomic. Running it may clobber a simultaneous update.</p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><ul class="simple">
<li><p><strong>source_dataset</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – the source dataset</p></li>
<li><p><strong>view_dataset</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – the dataset that the view is in</p></li>
<li><p><strong>view_table</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – the table of the view</p></li>
<li><p><strong>source_project</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – the project of the source dataset. If None,
self.project_id will be used.</p></li>
<li><p><strong>view_project</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – the project that the view is in. If None,
self.project_id will be used.</p></li>
</ul>
</dd>
<dt class="field-even">Returns</dt>
<dd class="field-even"><p>the datasets resource of the source dataset.</p>
</dd>
</dl>
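<p>A minimal sketch of <code class="docutils literal notranslate"><span class="pre">run_grant_dataset_view_access</span></code>; the dataset and view names are placeholders, and both projects fall back to the hook's project when the project arguments are left as <code class="docutils literal notranslate"><span class="pre">None</span></code>.</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from airflow.contrib.hooks.bigquery_hook import BigQueryHook

cursor = BigQueryHook(bigquery_conn_id='bigquery_default').get_conn().cursor()

# Authorize the view your_view_dataset.your_view to read source_dataset.
cursor.run_grant_dataset_view_access(
    source_dataset='source_dataset',
    view_dataset='your_view_dataset',
    view_table='your_view',
)
</pre></div></div>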
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.create_empty_dataset">
<code class="descname">create_empty_dataset</code><span class="sig-paren">(</span><em>self</em>, <em>dataset_id=''</em>, <em>project_id=''</em>, <em>dataset_reference=None</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryBaseCursor.create_empty_dataset"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.create_empty_dataset" title="Permalink to this definition"></a></dt>
<dd><p>Create a new empty dataset:
<a class="reference external" href="https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/insert">https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/insert</a></p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><ul class="simple">
<li><p><strong>project_id</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The name of the project in which to create
the empty dataset. Not required if projectId is set in dataset_reference.</p></li>
<li><p><strong>dataset_id</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The id of the dataset. Not required
if datasetId is set in dataset_reference.</p></li>
<li><p><strong>dataset_reference</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.7)"><em>dict</em></a>) – Dataset reference that could be provided
with request body. More info:
<a class="reference external" href="https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets#resource">https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets#resource</a></p></li>
</ul>
</dd>
</dl>
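<p>A minimal sketch of <code class="docutils literal notranslate"><span class="pre">create_empty_dataset</span></code> with placeholder ids; the same information could instead be supplied through a <code class="docutils literal notranslate"><span class="pre">dataset_reference</span></code> body as described above.</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from airflow.contrib.hooks.bigquery_hook import BigQueryHook

cursor = BigQueryHook(bigquery_conn_id='bigquery_default').get_conn().cursor()

# Create an empty dataset in a placeholder project.
cursor.create_empty_dataset(dataset_id='new_dataset', project_id='your-project')
</pre></div></div>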
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.delete_dataset">
<code class="descname">delete_dataset</code><span class="sig-paren">(</span><em>self</em>, <em>project_id</em>, <em>dataset_id</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryBaseCursor.delete_dataset"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.delete_dataset" title="Permalink to this definition"></a></dt>
<dd><p>Delete a dataset in BigQuery from your project.</p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><ul class="simple">
<li><p><strong>project_id</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The name of the project that contains the dataset.</p></li>
<li><p><strong>dataset_id</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The dataset to be deleted.</p></li>
</ul>
</dd>
</dl>
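<p>A minimal sketch of <code class="docutils literal notranslate"><span class="pre">delete_dataset</span></code> with placeholder ids, assuming the same cursor setup as the earlier sketches.</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from airflow.contrib.hooks.bigquery_hook import BigQueryHook

cursor = BigQueryHook(bigquery_conn_id='bigquery_default').get_conn().cursor()

# Remove a placeholder dataset from the given project.
cursor.delete_dataset(project_id='your-project', dataset_id='obsolete_dataset')
</pre></div></div>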
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.get_dataset">
<code class="descname">get_dataset</code><span class="sig-paren">(</span><em>self</em>, <em>dataset_id</em>, <em>project_id=None</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryBaseCursor.get_dataset"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.get_dataset" title="Permalink to this definition"></a></dt>
<dd><p>Returns the dataset_resource if the dataset exists,
and raises a 404 error if it does not.</p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><ul class="simple">
<li><p><strong>dataset_id</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The BigQuery Dataset ID</p></li>
<li><p><strong>project_id</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The GCP Project ID</p></li>
</ul>
</dd>
<dt class="field-even">Returns</dt>
<dd class="field-even"><p><p>dataset_resource</p>
<div class="admonition seealso">
<p class="admonition-title">See also</p>
<p>For more information, see Dataset Resource content:
<a class="reference external" href="https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets#resource">https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets#resource</a></p>
</div>
</p>
</dd>
</dl>
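<p>A short sketch of <code class="docutils literal notranslate"><span class="pre">get_dataset</span></code>; the returned dict is assumed to follow the Dataset Resource format linked above, and the ids are placeholders.</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from airflow.contrib.hooks.bigquery_hook import BigQueryHook

cursor = BigQueryHook(bigquery_conn_id='bigquery_default').get_conn().cursor()

# Fetch the dataset resource for a placeholder dataset.
dataset = cursor.get_dataset(dataset_id='your_dataset', project_id='your-project')
print(dataset['datasetReference']['datasetId'])
</pre></div></div>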
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.get_datasets_list">
<code class="descname">get_datasets_list</code><span class="sig-paren">(</span><em>self</em>, <em>project_id=None</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryBaseCursor.get_datasets_list"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.get_datasets_list" title="Permalink to this definition"></a></dt>
<dd><p>Returns the full list of BigQuery datasets in the current project.</p>
<div class="admonition seealso">
<p class="admonition-title">See also</p>
<p>For more information, see:
<a class="reference external" href="https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/list">https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/list</a></p>
</div>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><p><strong>project_id</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – Google Cloud Project for which you
try to get all datasets</p>
</dd>
<dt class="field-even">Returns</dt>
<dd class="field-even"><p><p>datasets_list</p>
<p>Example of returned datasets_list:</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span> <span class="p">{</span>
<span class="s2">&quot;kind&quot;</span><span class="p">:</span><span class="s2">&quot;bigquery#dataset&quot;</span><span class="p">,</span>
<span class="s2">&quot;location&quot;</span><span class="p">:</span><span class="s2">&quot;US&quot;</span><span class="p">,</span>
<span class="s2">&quot;id&quot;</span><span class="p">:</span><span class="s2">&quot;your-project:dataset_2_test&quot;</span><span class="p">,</span>
<span class="s2">&quot;datasetReference&quot;</span><span class="p">:{</span>
<span class="s2">&quot;projectId&quot;</span><span class="p">:</span><span class="s2">&quot;your-project&quot;</span><span class="p">,</span>
<span class="s2">&quot;datasetId&quot;</span><span class="p">:</span><span class="s2">&quot;dataset_2_test&quot;</span>
<span class="p">}</span>
<span class="p">},</span>
<span class="p">{</span>
<span class="s2">&quot;kind&quot;</span><span class="p">:</span><span class="s2">&quot;bigquery#dataset&quot;</span><span class="p">,</span>
<span class="s2">&quot;location&quot;</span><span class="p">:</span><span class="s2">&quot;US&quot;</span><span class="p">,</span>
<span class="s2">&quot;id&quot;</span><span class="p">:</span><span class="s2">&quot;your-project:dataset_1_test&quot;</span><span class="p">,</span>
<span class="s2">&quot;datasetReference&quot;</span><span class="p">:{</span>
<span class="s2">&quot;projectId&quot;</span><span class="p">:</span><span class="s2">&quot;your-project&quot;</span><span class="p">,</span>
<span class="s2">&quot;datasetId&quot;</span><span class="p">:</span><span class="s2">&quot;dataset_1_test&quot;</span>
<span class="p">}</span>
<span class="p">}</span>
<span class="p">]</span>
</pre></div>
</div>
</p>
</dd>
</dl>
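<p>The call below would produce a list shaped like the example above; the project id is a placeholder and the cursor setup is the same assumption as in the earlier sketches.</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from airflow.contrib.hooks.bigquery_hook import BigQueryHook

cursor = BigQueryHook(bigquery_conn_id='bigquery_default').get_conn().cursor()

# List every dataset in a placeholder project and print their ids.
datasets_list = cursor.get_datasets_list(project_id='your-project')
for dataset in datasets_list:
    print(dataset['datasetReference']['datasetId'])
</pre></div></div>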
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.insert_all">
<code class="descname">insert_all</code><span class="sig-paren">(</span><em>self</em>, <em>project_id</em>, <em>dataset_id</em>, <em>table_id</em>, <em>rows</em>, <em>ignore_unknown_values=False</em>, <em>skip_invalid_rows=False</em>, <em>fail_on_error=False</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryBaseCursor.insert_all"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor.insert_all" title="Permalink to this definition"></a></dt>
<dd><p>Method to stream data into BigQuery one record at a time without needing
to run a load job</p>
<div class="admonition seealso">
<p class="admonition-title">See also</p>
<p>For more information, see:
<a class="reference external" href="https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/insertAll">https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/insertAll</a></p>
</div>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><ul class="simple">
<li><p><strong>project_id</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The name of the project where we have the table</p></li>
<li><p><strong>dataset_id</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The name of the dataset where we have the table</p></li>
<li><p><strong>table_id</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The name of the table</p></li>
<li><p><strong>rows</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#list" title="(in Python v3.7)"><em>list</em></a>) – the rows to insert</p></li>
</ul>
</dd>
</dl>
<dl class="simple">
<dt><strong>Example or rows</strong>:</dt><dd><p>rows=[{“json”: {“a_key”: “a_value_0”}}, {“json”: {“a_key”: “a_value_1”}}]</p>
</dd>
</dl>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><ul class="simple">
<li><p><strong>ignore_unknown_values</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#bool" title="(in Python v3.7)"><em>bool</em></a>) – [Optional] Accept rows that contain values
that do not match the schema. The unknown values are ignored.
The default value is false, which treats unknown values as errors.</p></li>
<li><p><strong>skip_invalid_rows</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#bool" title="(in Python v3.7)"><em>bool</em></a>) – [Optional] Insert all valid rows of a request,
even if invalid rows exist. The default value is false, which causes
the entire request to fail if any invalid rows exist.</p></li>
<li><p><strong>fail_on_error</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#bool" title="(in Python v3.7)"><em>bool</em></a>) – [Optional] Force the task to fail if any errors occur.
The default value is false, which indicates the task should not fail
even if any insertion errors occur.</p></li>
</ul>
</dd>
</dl>
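<p>A minimal sketch of <code class="docutils literal notranslate"><span class="pre">insert_all</span></code>, streaming the two example rows above into a placeholder table; <code class="docutils literal notranslate"><span class="pre">fail_on_error=True</span></code> makes the task fail if any row is rejected.</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from airflow.contrib.hooks.bigquery_hook import BigQueryHook

cursor = BigQueryHook(bigquery_conn_id='bigquery_default').get_conn().cursor()

# Each row is wrapped in a {"json": {...}} envelope, as in the example above.
rows = [
    {'json': {'a_key': 'a_value_0'}},
    {'json': {'a_key': 'a_value_1'}},
]
cursor.insert_all(
    project_id='your-project',
    dataset_id='your_dataset',
    table_id='your_table',
    rows=rows,
    fail_on_error=True,
)
</pre></div></div>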
</dd></dl>
</dd></dl>
<dl class="class">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryCursor">
<em class="property">class </em><code class="descclassname">airflow.contrib.hooks.bigquery_hook.</code><code class="descname">BigQueryCursor</code><span class="sig-paren">(</span><em>service</em>, <em>project_id</em>, <em>use_legacy_sql=True</em>, <em>location=None</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryCursor"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryCursor" title="Permalink to this definition"></a></dt>
<dd><p>Bases: <a class="reference internal" href="#airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor" title="airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.bigquery_hook.BigQueryBaseCursor</span></code></a></p>
<p>A very basic BigQuery PEP 249 cursor implementation. The PyHive PEP 249
implementation was used as a reference:</p>
<p><a class="reference external" href="https://github.com/dropbox/PyHive/blob/master/pyhive/presto.py">https://github.com/dropbox/PyHive/blob/master/pyhive/presto.py</a>
<a class="reference external" href="https://github.com/dropbox/PyHive/blob/master/pyhive/common.py">https://github.com/dropbox/PyHive/blob/master/pyhive/common.py</a></p>
<dl class="attribute">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryCursor.description">
<code class="descname">description</code><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryCursor.description"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryCursor.description" title="Permalink to this definition"></a></dt>
<dd><p>The schema description method is not currently implemented.</p>
</dd></dl>
<dl class="attribute">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryCursor.rowcount">
<code class="descname">rowcount</code><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryCursor.rowcount"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryCursor.rowcount" title="Permalink to this definition"></a></dt>
<dd><p>By default, return -1 to indicate that this is not supported.</p>
</dd></dl>
<dl class="attribute">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryCursor.arraysize">
<code class="descname">arraysize</code><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryCursor.arraysize"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryCursor.arraysize" title="Permalink to this definition"></a></dt>
<dd></dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryCursor.close">
<code class="descname">close</code><span class="sig-paren">(</span><em>self</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryCursor.close"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryCursor.close" title="Permalink to this definition"></a></dt>
<dd><p>By default, do nothing</p>
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryCursor.execute">
<code class="descname">execute</code><span class="sig-paren">(</span><em>self</em>, <em>operation</em>, <em>parameters=None</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryCursor.execute"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryCursor.execute" title="Permalink to this definition"></a></dt>
<dd><p>Executes a BigQuery query, and returns the job ID.</p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><ul class="simple">
<li><p><strong>operation</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The query to execute.</p></li>
<li><p><strong>parameters</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.7)"><em>dict</em></a>) – Parameters to substitute into the query.</p></li>
</ul>
</dd>
</dl>
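<p>A minimal PEP 249 style sketch using <code class="docutils literal notranslate"><span class="pre">execute</span></code> and <code class="docutils literal notranslate"><span class="pre">fetchone</span></code>; the connection id, dataset and table are placeholders, and the cursor is assumed to come from the hook's connection as in the earlier sketches.</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from airflow.contrib.hooks.bigquery_hook import BigQueryHook

cursor = BigQueryHook(bigquery_conn_id='bigquery_default',
                      use_legacy_sql=False).get_conn().cursor()

# Parameters are bound into the query string before it is submitted.
cursor.execute(
    'SELECT name FROM your_dataset.your_table WHERE id = %(id)s',
    {'id': 42},
)
print(cursor.fetchone())
</pre></div></div>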
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryCursor.executemany">
<code class="descname">executemany</code><span class="sig-paren">(</span><em>self</em>, <em>operation</em>, <em>seq_of_parameters</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryCursor.executemany"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryCursor.executemany" title="Permalink to this definition"></a></dt>
<dd><p>Execute a BigQuery query multiple times with different parameters.</p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><ul class="simple">
<li><p><strong>operation</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.7)"><em>str</em></a>) – The query to execute.</p></li>
<li><p><strong>seq_of_parameters</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#list" title="(in Python v3.7)"><em>list</em></a>) – List of dictionary parameters to substitute into the
query.</p></li>
</ul>
</dd>
</dl>
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryCursor.fetchone">
<code class="descname">fetchone</code><span class="sig-paren">(</span><em>self</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryCursor.fetchone"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryCursor.fetchone" title="Permalink to this definition"></a></dt>
<dd><p>Fetch the next row of a query result set.</p>
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryCursor.next">
<code class="descname">next</code><span class="sig-paren">(</span><em>self</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryCursor.next"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryCursor.next" title="Permalink to this definition"></a></dt>
<dd><p>Helper method for fetchone, which returns the next row from a buffer.
If the buffer is empty, attempts to paginate through the result set for
the next page, and load it into the buffer.</p>
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryCursor.fetchmany">
<code class="descname">fetchmany</code><span class="sig-paren">(</span><em>self</em>, <em>size=None</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryCursor.fetchmany"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryCursor.fetchmany" title="Permalink to this definition"></a></dt>
<dd><p>Fetch the next set of rows of a query result, returning a sequence of sequences
(e.g. a list of tuples). An empty sequence is returned when no more rows are
available. The number of rows to fetch per call is specified by the parameter.
If it is not given, the cursor’s arraysize determines the number of rows to be
fetched. The method should try to fetch as many rows as indicated by the size
parameter. If this is not possible due to the specified number of rows not being
available, fewer rows may be returned. An <code class="xref py py-class docutils literal notranslate"><span class="pre">Error</span></code>
(or subclass) exception is raised if the previous call to
<a class="reference internal" href="#airflow.contrib.hooks.bigquery_hook.BigQueryCursor.execute" title="airflow.contrib.hooks.bigquery_hook.BigQueryCursor.execute"><code class="xref py py-meth docutils literal notranslate"><span class="pre">execute()</span></code></a> did not produce any result set or no call was issued yet.</p>
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryCursor.fetchall">
<code class="descname">fetchall</code><span class="sig-paren">(</span><em>self</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryCursor.fetchall"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryCursor.fetchall" title="Permalink to this definition"></a></dt>
<dd><p>Fetch all (remaining) rows of a query result, returning them as a sequence of
sequences (e.g. a list of tuples).</p>
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryCursor.get_arraysize">
<code class="descname">get_arraysize</code><span class="sig-paren">(</span><em>self</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryCursor.get_arraysize"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryCursor.get_arraysize" title="Permalink to this definition"></a></dt>
<dd><p>Specifies the number of rows to fetch at a time with .fetchmany()</p>
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryCursor.set_arraysize">
<code class="descname">set_arraysize</code><span class="sig-paren">(</span><em>self</em>, <em>arraysize</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryCursor.set_arraysize"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryCursor.set_arraysize" title="Permalink to this definition"></a></dt>
<dd><p>Specifies the number of rows to fetch at a time with .fetchmany()</p>
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryCursor.setinputsizes">
<code class="descname">setinputsizes</code><span class="sig-paren">(</span><em>self</em>, <em>sizes</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryCursor.setinputsizes"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryCursor.setinputsizes" title="Permalink to this definition"></a></dt>
<dd><p>Does nothing by default</p>
</dd></dl>
<dl class="method">
<dt id="airflow.contrib.hooks.bigquery_hook.BigQueryCursor.setoutputsize">
<code class="descname">setoutputsize</code><span class="sig-paren">(</span><em>self</em>, <em>size</em>, <em>column=None</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#BigQueryCursor.setoutputsize"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook.BigQueryCursor.setoutputsize" title="Permalink to this definition"></a></dt>
<dd><p>Does nothing by default</p>
</dd></dl>
</dd></dl>
<dl class="function">
<dt id="airflow.contrib.hooks.bigquery_hook._bind_parameters">
<code class="descclassname">airflow.contrib.hooks.bigquery_hook.</code><code class="descname">_bind_parameters</code><span class="sig-paren">(</span><em>operation</em>, <em>parameters</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#_bind_parameters"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook._bind_parameters" title="Permalink to this definition"></a></dt>
<dd><p>Helper method that binds parameters to a SQL query.</p>
</dd></dl>
<dl class="function">
<dt id="airflow.contrib.hooks.bigquery_hook._escape">
<code class="descclassname">airflow.contrib.hooks.bigquery_hook.</code><code class="descname">_escape</code><span class="sig-paren">(</span><em>s</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#_escape"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook._escape" title="Permalink to this definition"></a></dt>
<dd><p>Helper method that escapes parameters to a SQL query.</p>
</dd></dl>
<dl class="function">
<dt id="airflow.contrib.hooks.bigquery_hook._bq_cast">
<code class="descclassname">airflow.contrib.hooks.bigquery_hook.</code><code class="descname">_bq_cast</code><span class="sig-paren">(</span><em>string_field</em>, <em>bq_type</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#_bq_cast"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook._bq_cast" title="Permalink to this definition"></a></dt>
<dd><p>Helper method that casts a BigQuery row to the appropriate data types.
This is useful because BigQuery returns all fields as strings.</p>
</dd></dl>
<dl class="function">
<dt id="airflow.contrib.hooks.bigquery_hook._split_tablename">
<code class="descclassname">airflow.contrib.hooks.bigquery_hook.</code><code class="descname">_split_tablename</code><span class="sig-paren">(</span><em>table_input</em>, <em>default_project_id</em>, <em>var_name=None</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#_split_tablename"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook._split_tablename" title="Permalink to this definition"></a></dt>
<dd></dd></dl>
<dl class="function">
<dt id="airflow.contrib.hooks.bigquery_hook._cleanse_time_partitioning">
<code class="descclassname">airflow.contrib.hooks.bigquery_hook.</code><code class="descname">_cleanse_time_partitioning</code><span class="sig-paren">(</span><em>destination_dataset_table</em>, <em>time_partitioning_in</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#_cleanse_time_partitioning"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook._cleanse_time_partitioning" title="Permalink to this definition"></a></dt>
<dd></dd></dl>
<dl class="function">
<dt id="airflow.contrib.hooks.bigquery_hook._validate_value">
<code class="descclassname">airflow.contrib.hooks.bigquery_hook.</code><code class="descname">_validate_value</code><span class="sig-paren">(</span><em>key</em>, <em>value</em>, <em>expected_type</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#_validate_value"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook._validate_value" title="Permalink to this definition"></a></dt>
<dd><p>Function to check the expected type and raise an
error if the type is not correct.</p>
</dd></dl>
<dl class="function">
<dt id="airflow.contrib.hooks.bigquery_hook._api_resource_configs_duplication_check">
<code class="descclassname">airflow.contrib.hooks.bigquery_hook.</code><code class="descname">_api_resource_configs_duplication_check</code><span class="sig-paren">(</span><em>key</em>, <em>value</em>, <em>config_dict</em>, <em>config_dict_name='api_resource_configs'</em><span class="sig-paren">)</span><a class="reference internal" href="../../../../../_modules/airflow/contrib/hooks/bigquery_hook.html#_api_resource_configs_duplication_check"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#airflow.contrib.hooks.bigquery_hook._api_resource_configs_duplication_check" title="Permalink to this definition"></a></dt>
<dd></dd></dl>
</div>
</div>
</div>
</div>
<footer>
<div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
<a href="../cassandra_hook/index.html" class="btn btn-neutral float-right" title="airflow.contrib.hooks.cassandra_hook" accesskey="n" rel="next">Next <span class="fa fa-arrow-circle-right"></span></a>
<a href="../azure_fileshare_hook/index.html" class="btn btn-neutral float-left" title="airflow.contrib.hooks.azure_fileshare_hook" accesskey="p" rel="prev"><span class="fa fa-arrow-circle-left"></span> Previous</a>
</div>
<hr/>
<div role="contentinfo">
<p>
</p>
</div>
Built with <a href="http://sphinx-doc.org/">Sphinx</a> using a <a href="https://github.com/rtfd/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
</footer>
</div>
</div>
</section>
</div>
<script type="text/javascript">
jQuery(function () {
SphinxRtdTheme.Navigation.enable(true);
});
</script>
</body>
</html>