<!DOCTYPE html>
<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Integration &mdash; Airflow Documentation</title>
<script type="text/javascript" src="_static/js/modernizr.min.js"></script>
<script type="text/javascript" id="documentation_options" data-url_root="./" src="_static/documentation_options.js"></script>
<script type="text/javascript" src="_static/jquery.js"></script>
<script type="text/javascript" src="_static/underscore.js"></script>
<script type="text/javascript" src="_static/doctools.js"></script>
<script type="text/javascript" src="_static/language_data.js"></script>
<script type="text/javascript" src="_static/js/theme.js"></script>
<link rel="stylesheet" href="_static/css/theme.css" type="text/css" />
<link rel="stylesheet" href="_static/pygments.css" type="text/css" />
<link rel="index" title="Index" href="genindex.html" />
<link rel="search" title="Search" href="search.html" />
<link rel="next" title="Metrics" href="metrics.html" />
<link rel="prev" title="Experimental Rest API" href="api.html" />
<script>
document.addEventListener('DOMContentLoaded', function() {
var el = document.getElementById('changelog');
if (el !== null ) {
// [AIRFLOW-...]
el.innerHTML = el.innerHTML.replace(
/\[(AIRFLOW-[\d]+)\]/g,
`<a href="https://issues.apache.org/jira/browse/$1">[$1]</a>`
);
// (#...)
el.innerHTML = el.innerHTML.replace(
/\(#([\d]+)\)/g,
`<a href="https://github.com/apache/airflow/pull/$1">(#$1)</a>`
);
};
})
</script>
<style>
.example-header {
position: relative;
background: #9AAA7A;
padding: 8px 16px;
margin-bottom: 0;
}
.example-header--with-button {
padding-right: 166px;
}
.example-header:after{
content: '';
display: table;
clear: both;
}
.example-title {
display:block;
padding: 4px;
margin-right: 16px;
color: white;
overflow-x: auto;
}
.example-header-button {
top: 8px;
right: 16px;
position: absolute;
}
.example-header + .highlight-python {
margin-top: 0 !important;
}
.viewcode-button {
display: inline-block;
padding: 8px 16px;
border: 0;
margin: 0;
outline: 0;
border-radius: 2px;
-webkit-box-shadow: 0 3px 5px 0 rgba(0,0,0,.3);
box-shadow: 0 3px 6px 0 rgba(0,0,0,.3);
color: #404040;
background-color: #e7e7e7;
cursor: pointer;
font-size: 16px;
font-weight: 500;
line-height: 1;
text-decoration: none;
text-overflow: ellipsis;
overflow: hidden;
text-transform: uppercase;
-webkit-transition: background-color .2s;
transition: background-color .2s;
vertical-align: middle;
white-space: nowrap;
}
.viewcode-button:visited {
color: #404040;
}
.viewcode-button:hover, .viewcode-button:focus {
color: #404040;
background-color: #d6d6d6;
}
</style>
<script type="application/javascript">
window.ga=window.ga||function(){(ga.q=ga.q||[]).push(arguments)};ga.l=+new Date;
ga("create", "UA-140539454-1", "auto");
ga("send", "pageview");
</script>
<script async src="https://www.google-analytics.com/analytics.js"></script>
</head>
<body class="wy-body-for-nav">
<div class="wy-grid-for-nav">
<nav data-toggle="wy-nav-shift" class="wy-nav-side">
<div class="wy-side-scroll">
<div class="wy-side-nav-search" >
<a href="index.html" class="icon icon-home"> Airflow
</a>
<div class="version">
1.10.3
</div>
<div role="search">
<form id="rtd-search-form" class="wy-form" action="search.html" method="get">
<input type="text" name="q" placeholder="Search docs" />
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
</div>
</div>
<div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
<ul class="current">
<li class="toctree-l1"><a class="reference internal" href="project.html">Project</a></li>
<li class="toctree-l1"><a class="reference internal" href="license.html">License</a></li>
<li class="toctree-l1"><a class="reference internal" href="start.html">Quick Start</a></li>
<li class="toctree-l1"><a class="reference internal" href="installation.html">Installation</a></li>
<li class="toctree-l1"><a class="reference internal" href="tutorial.html">Tutorial</a></li>
<li class="toctree-l1"><a class="reference internal" href="howto/index.html">How-to Guides</a></li>
<li class="toctree-l1"><a class="reference internal" href="ui.html">UI / Screenshots</a></li>
<li class="toctree-l1"><a class="reference internal" href="concepts.html">Concepts</a></li>
<li class="toctree-l1"><a class="reference internal" href="profiling.html">Data Profiling</a></li>
<li class="toctree-l1"><a class="reference internal" href="cli.html">Command Line Interface</a></li>
<li class="toctree-l1"><a class="reference internal" href="scheduler.html">Scheduling &amp; Triggers</a></li>
<li class="toctree-l1"><a class="reference internal" href="plugins.html">Plugins</a></li>
<li class="toctree-l1"><a class="reference internal" href="security.html">Security</a></li>
<li class="toctree-l1"><a class="reference internal" href="timezone.html">Time zones</a></li>
<li class="toctree-l1"><a class="reference internal" href="api.html">Experimental Rest API</a></li>
<li class="toctree-l1 current"><a class="current reference internal" href="#">Integration</a><ul>
<li class="toctree-l2"><a class="reference internal" href="#azure-microsoft-azure">Azure: Microsoft Azure</a><ul>
<li class="toctree-l3"><a class="reference internal" href="#azure-blob-storage">Azure Blob Storage</a></li>
<li class="toctree-l3"><a class="reference internal" href="#azure-file-share">Azure File Share</a></li>
<li class="toctree-l3"><a class="reference internal" href="#logging">Logging</a></li>
<li class="toctree-l3"><a class="reference internal" href="#azure-cosmosdb">Azure CosmosDB</a></li>
<li class="toctree-l3"><a class="reference internal" href="#azure-data-lake">Azure Data Lake</a></li>
<li class="toctree-l3"><a class="reference internal" href="#azure-container-instances">Azure Container Instances</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="#aws-amazon-web-services">AWS: Amazon Web Services</a><ul>
<li class="toctree-l3"><a class="reference internal" href="#aws-emr">AWS EMR</a></li>
<li class="toctree-l3"><a class="reference internal" href="#aws-s3">AWS S3</a></li>
<li class="toctree-l3"><a class="reference internal" href="#aws-batch-service">AWS Batch Service</a></li>
<li class="toctree-l3"><a class="reference internal" href="#aws-redshift">AWS RedShift</a></li>
<li class="toctree-l3"><a class="reference internal" href="#aws-dynamodb">AWS DynamoDB</a></li>
<li class="toctree-l3"><a class="reference internal" href="#aws-lambda">AWS Lambda</a></li>
<li class="toctree-l3"><a class="reference internal" href="#aws-kinesis">AWS Kinesis</a></li>
<li class="toctree-l3"><a class="reference internal" href="#amazon-sagemaker">Amazon SageMaker</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="#databricks">Databricks</a></li>
<li class="toctree-l2"><a class="reference internal" href="#gcp-google-cloud-platform">GCP: Google Cloud Platform</a><ul>
<li class="toctree-l3"><a class="reference internal" href="#id2">Logging</a></li>
<li class="toctree-l3"><a class="reference internal" href="#googlecloudbasehook">GoogleCloudBaseHook</a></li>
<li class="toctree-l3"><a class="reference internal" href="#bigquery">BigQuery</a></li>
<li class="toctree-l3"><a class="reference internal" href="#cloud-spanner">Cloud Spanner</a></li>
<li class="toctree-l3"><a class="reference internal" href="#cloud-sql">Cloud SQL</a></li>
<li class="toctree-l3"><a class="reference internal" href="#cloud-bigtable">Cloud Bigtable</a></li>
<li class="toctree-l3"><a class="reference internal" href="#compute-engine">Compute Engine</a></li>
<li class="toctree-l3"><a class="reference internal" href="#cloud-functions">Cloud Functions</a></li>
<li class="toctree-l3"><a class="reference internal" href="#cloud-dataflow">Cloud DataFlow</a></li>
<li class="toctree-l3"><a class="reference internal" href="#cloud-dataproc">Cloud DataProc</a></li>
<li class="toctree-l3"><a class="reference internal" href="#cloud-datastore">Cloud Datastore</a></li>
<li class="toctree-l3"><a class="reference internal" href="#cloud-ml-engine">Cloud ML Engine</a></li>
<li class="toctree-l3"><a class="reference internal" href="#cloud-storage">Cloud Storage</a></li>
<li class="toctree-l3"><a class="reference internal" href="#transfer-service">Transfer Service</a></li>
<li class="toctree-l3"><a class="reference internal" href="#cloud-vision">Cloud Vision</a><ul>
<li class="toctree-l4"><a class="reference internal" href="#cloud-vision-product-search-operators">Cloud Vision Product Search Operators</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="#cloud-translate">Cloud Translate</a><ul>
<li class="toctree-l4"><a class="reference internal" href="#cloud-translate-text-operators">Cloud Translate Text Operators</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="#google-kubernetes-engine">Google Kubernetes Engine</a></li>
<li class="toctree-l3"><a class="reference internal" href="#google-natural-language">Google Natural Language</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="#qubole">Qubole</a></li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="metrics.html">Metrics</a></li>
<li class="toctree-l1"><a class="reference internal" href="kubernetes.html">Kubernetes</a></li>
<li class="toctree-l1"><a class="reference internal" href="lineage.html">Lineage</a></li>
<li class="toctree-l1"><a class="reference internal" href="changelog.html">Changelog</a></li>
<li class="toctree-l1"><a class="reference internal" href="faq.html">FAQ</a></li>
<li class="toctree-l1"><a class="reference internal" href="macros.html">Macros reference</a></li>
<li class="toctree-l1"><a class="reference internal" href="_api/index.html">API Reference</a></li>
</ul>
</div>
</div>
</nav>
<section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
<nav class="wy-nav-top" aria-label="top navigation">
<i data-toggle="wy-nav-top" class="fa fa-bars"></i>
<a href="index.html">Airflow</a>
</nav>
<div class="wy-nav-content">
<div class="rst-content">
<div role="navigation" aria-label="breadcrumbs navigation">
<ul class="wy-breadcrumbs">
<li><a href="index.html">Docs</a> &raquo;</li>
<li>Integration</li>
<li class="wy-breadcrumbs-aside">
<a href="_sources/integration.rst.txt" rel="nofollow"> View page source</a>
</li>
</ul>
<hr/>
</div>
<div role="main" class="document" itemscope="itemscope" itemtype="http://schema.org/Article">
<div itemprop="articleBody">
<div class="section" id="integration">
<h1>Integration<a class="headerlink" href="#integration" title="Permalink to this headline"></a></h1>
<ul class="simple">
<li><p><a class="reference internal" href="#azure"><span class="std std-ref">Azure: Microsoft Azure</span></a></p></li>
<li><p><a class="reference internal" href="#aws"><span class="std std-ref">AWS: Amazon Web Services</span></a></p></li>
<li><p><a class="reference internal" href="#databricks"><span class="std std-ref">Databricks</span></a></p></li>
<li><p><a class="reference internal" href="#gcp"><span class="std std-ref">GCP: Google Cloud Platform</span></a></p></li>
<li><p><a class="reference internal" href="#qubole"><span class="std std-ref">Qubole</span></a></p></li>
</ul>
<div class="section" id="azure-microsoft-azure">
<span id="azure"></span><h2>Azure: Microsoft Azure<a class="headerlink" href="#azure-microsoft-azure" title="Permalink to this headline"></a></h2>
<p>Airflow has limited support for Microsoft Azure: interfaces exist for Azure Blob
Storage, Azure File Share, Azure CosmosDB, Azure Data Lake and Azure Container Instances.
The corresponding Hooks, Sensors and Operators are in the contrib section.</p>
<div class="section" id="azure-blob-storage">
<h3>Azure Blob Storage<a class="headerlink" href="#azure-blob-storage" title="Permalink to this headline"></a></h3>
<p>All classes communicate via the Windows Azure Storage Blob protocol. Make sure that an
Airflow connection of type <cite>wasb</cite> exists. Authorization can be done by supplying a
login (=Storage account name) and password (=KEY), or a login and SAS token in the extra
field (see connection <cite>wasb_default</cite> for an example).</p>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/hooks/wasb_hook/index.html#airflow.contrib.hooks.wasb_hook.WasbHook" title="airflow.contrib.hooks.wasb_hook.WasbHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.wasb_hook.WasbHook</span></code></a></dt><dd><p>Interface with Azure Blob Storage.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/sensors/wasb_sensor/index.html#airflow.contrib.sensors.wasb_sensor.WasbBlobSensor" title="airflow.contrib.sensors.wasb_sensor.WasbBlobSensor"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.sensors.wasb_sensor.WasbBlobSensor</span></code></a></dt><dd><p>Checks if a blob is present on Azure Blob storage.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/wasb_delete_blob_operator/index.html#airflow.contrib.operators.wasb_delete_blob_operator.WasbDeleteBlobOperator" title="airflow.contrib.operators.wasb_delete_blob_operator.WasbDeleteBlobOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.wasb_delete_blob_operator.WasbDeleteBlobOperator</span></code></a></dt><dd><p>Deletes blob(s) on Azure Blob Storage.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/sensors/wasb_sensor/index.html#airflow.contrib.sensors.wasb_sensor.WasbPrefixSensor" title="airflow.contrib.sensors.wasb_sensor.WasbPrefixSensor"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.sensors.wasb_sensor.WasbPrefixSensor</span></code></a></dt><dd><p>Checks if blobs matching a prefix are present on Azure Blob storage.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/file_to_wasb/index.html#airflow.contrib.operators.file_to_wasb.FileToWasbOperator" title="airflow.contrib.operators.file_to_wasb.FileToWasbOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.file_to_wasb.FileToWasbOperator</span></code></a></dt><dd><p>Uploads a local file to a container as a blob.</p>
</dd>
</dl>
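<p>For illustration, a minimal DAG sketch using the upload operator and blob sensor might look like the following; the container name, blob name and local path are placeholders, and the default <cite>wasb_default</cite> connection is assumed to be configured:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>
from airflow import DAG
from airflow.contrib.operators.file_to_wasb import FileToWasbOperator
from airflow.contrib.sensors.wasb_sensor import WasbBlobSensor
from airflow.utils.dates import days_ago

dag = DAG("wasb_example", schedule_interval=None, start_date=days_ago(1))

# Upload a local file to the "mycontainer" container as the blob "data/report.csv".
upload = FileToWasbOperator(
    task_id="upload_report",
    file_path="/tmp/report.csv",
    container_name="mycontainer",
    blob_name="data/report.csv",
    wasb_conn_id="wasb_default",
    dag=dag,
)

# Wait until the uploaded blob is visible in the container.
wait = WasbBlobSensor(
    task_id="wait_for_report",
    container_name="mycontainer",
    blob_name="data/report.csv",
    wasb_conn_id="wasb_default",
    dag=dag,
)

upload.set_downstream(wait)
</pre></div></div>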
</div>
<div class="section" id="azure-file-share">
<h3>Azure File Share<a class="headerlink" href="#azure-file-share" title="Permalink to this headline"></a></h3>
<p>Cloud variant of an SMB file share. Make sure that an Airflow connection of
type <cite>wasb</cite> exists. Authorization can be done by supplying a login (=Storage account name)
and password (=Storage account key), or a login and SAS token in the extra field
(see connection <cite>wasb_default</cite> for an example).</p>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/hooks/azure_fileshare_hook/index.html#airflow.contrib.hooks.azure_fileshare_hook.AzureFileShareHook" title="airflow.contrib.hooks.azure_fileshare_hook.AzureFileShareHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.azure_fileshare_hook.AzureFileShareHook</span></code></a>:</dt><dd><p>Interface with Azure File Share.</p>
</dd>
</dl>
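<p>A short sketch of calling the hook directly, for example from a <cite>PythonOperator</cite> callable; the share, directory and file names below are placeholders and the <cite>wasb_default</cite> connection is assumed:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>
from airflow.contrib.hooks.azure_fileshare_hook import AzureFileShareHook

def upload_to_file_share():
    # The hook reads the storage account credentials from the "wasb_default" connection.
    hook = AzureFileShareHook(wasb_conn_id="wasb_default")
    # Upload a local file into the "reports" directory of the "myshare" file share.
    hook.load_file(
        file_path="/tmp/report.csv",
        share_name="myshare",
        directory_name="reports",
        file_name="report.csv",
    )
</pre></div></div>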
</div>
<div class="section" id="logging">
<h3>Logging<a class="headerlink" href="#logging" title="Permalink to this headline"></a></h3>
<p>Airflow can be configured to read and write task logs in Azure Blob Storage.
See <a class="reference internal" href="howto/write-logs.html#write-logs-azure"><span class="std std-ref">Writing Logs to Azure Blob Storage</span></a>.</p>
</div>
<div class="section" id="azure-cosmosdb">
<h3>Azure CosmosDB<a class="headerlink" href="#azure-cosmosdb" title="Permalink to this headline"></a></h3>
<p>AzureCosmosDBHook communicates via the Azure Cosmos library. Make sure that an
Airflow connection of type <cite>azure_cosmos</cite> exists. Authorization can be done by supplying a
login (=Endpoint URI), a password (=secret key) and the extra fields database_name and collection_name to specify the
default database and collection to use (see connection <cite>azure_cosmos_default</cite> for an example).</p>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/hooks/azure_cosmos_hook/index.html#airflow.contrib.hooks.azure_cosmos_hook.AzureCosmosDBHook" title="airflow.contrib.hooks.azure_cosmos_hook.AzureCosmosDBHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.azure_cosmos_hook.AzureCosmosDBHook</span></code></a></dt><dd><p>Interface with Azure CosmosDB.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/azure_cosmos_operator/index.html#airflow.contrib.operators.azure_cosmos_operator.AzureCosmosInsertDocumentOperator" title="airflow.contrib.operators.azure_cosmos_operator.AzureCosmosInsertDocumentOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.azure_cosmos_operator.AzureCosmosInsertDocumentOperator</span></code></a></dt><dd><p>Simple operator to insert document into CosmosDB.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/sensors/azure_cosmos_sensor/index.html#airflow.contrib.sensors.azure_cosmos_sensor.AzureCosmosDocumentSensor" title="airflow.contrib.sensors.azure_cosmos_sensor.AzureCosmosDocumentSensor"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.sensors.azure_cosmos_sensor.AzureCosmosDocumentSensor</span></code></a></dt><dd><p>Simple sensor to detect document existence in CosmosDB.</p>
</dd>
</dl>
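<p>A minimal sketch of inserting a document with the operator; the database name, collection name and document contents are placeholders and the <cite>azure_cosmos_default</cite> connection is assumed:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>
from airflow import DAG
from airflow.contrib.operators.azure_cosmos_operator import AzureCosmosInsertDocumentOperator
from airflow.utils.dates import days_ago

dag = DAG("cosmos_example", schedule_interval=None, start_date=days_ago(1))

# Insert a single document into the given database and collection
# (or into the defaults configured on the connection).
insert_doc = AzureCosmosInsertDocumentOperator(
    task_id="insert_document",
    database_name="airflow_db",
    collection_name="events",
    document={"id": "run-001", "status": "ok"},
    azure_cosmos_conn_id="azure_cosmos_default",
    dag=dag,
)
</pre></div></div>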
</div>
<div class="section" id="azure-data-lake">
<h3>Azure Data Lake<a class="headerlink" href="#azure-data-lake" title="Permalink to this headline"></a></h3>
<p>AzureDataLakeHook communicates via a REST API compatible with WebHDFS. Make sure that an
Airflow connection of type <cite>azure_data_lake</cite> exists. Authorization can be done by supplying a
login (=Client ID), a password (=Client Secret) and the extra fields tenant (Tenant) and account_name (Account Name)
(see connection <cite>azure_data_lake_default</cite> for an example).</p>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/hooks/azure_data_lake_hook/index.html#airflow.contrib.hooks.azure_data_lake_hook.AzureDataLakeHook" title="airflow.contrib.hooks.azure_data_lake_hook.AzureDataLakeHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.azure_data_lake_hook.AzureDataLakeHook</span></code></a></dt><dd><p>Interface with Azure Data Lake.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/adls_list_operator/index.html#airflow.contrib.operators.adls_list_operator.AzureDataLakeStorageListOperator" title="airflow.contrib.operators.adls_list_operator.AzureDataLakeStorageListOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.adls_list_operator.AzureDataLakeStorageListOperator</span></code></a></dt><dd><p>Lists the files located in a specified Azure Data Lake path.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/adls_to_gcs/index.html#airflow.contrib.operators.adls_to_gcs.AdlsToGoogleCloudStorageOperator" title="airflow.contrib.operators.adls_to_gcs.AdlsToGoogleCloudStorageOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.adls_to_gcs.AdlsToGoogleCloudStorageOperator</span></code></a></dt><dd><p>Copies files from an Azure Data Lake path to a Google Cloud Storage bucket.</p>
</dd>
</dl>
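<p>A short sketch of using the hook from Python code; the local and remote paths are placeholders and the <cite>azure_data_lake_default</cite> connection is assumed:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>
from airflow.contrib.hooks.azure_data_lake_hook import AzureDataLakeHook

def push_to_adls():
    # Client id/secret, tenant and account name come from the connection.
    hook = AzureDataLakeHook(azure_data_lake_conn_id="azure_data_lake_default")
    # Upload a local file to a path inside the Data Lake store.
    hook.upload_file(local_path="/tmp/report.csv", remote_path="raw/report.csv")
    # Check that the file landed.
    return hook.check_for_file("raw/report.csv")
</pre></div></div>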
</div>
<div class="section" id="azure-container-instances">
<h3>Azure Container Instances<a class="headerlink" href="#azure-container-instances" title="Permalink to this headline"></a></h3>
<p>Azure Container Instances provides a method to run a Docker container without having to worry
about managing infrastructure. The AzureContainerInstanceHook requires a service principal. The
credentials for this principal can be defined either in the extra field <code class="docutils literal notranslate"><span class="pre">key_path</span></code>, in an
environment variable named <code class="docutils literal notranslate"><span class="pre">AZURE_AUTH_LOCATION</span></code>,
or by providing a login/password and tenantId in extras.</p>
<p>The AzureContainerRegistryHook requires a host/login/password to be defined in the connection.</p>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/hooks/azure_container_volume_hook/index.html#airflow.contrib.hooks.azure_container_volume_hook.AzureContainerVolumeHook" title="airflow.contrib.hooks.azure_container_volume_hook.AzureContainerVolumeHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.azure_container_volume_hook.AzureContainerVolumeHook</span></code></a></dt><dd><p>Interface with Azure Container Volumes</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/azure_container_instances_operator/index.html#airflow.contrib.operators.azure_container_instances_operator.AzureContainerInstancesOperator" title="airflow.contrib.operators.azure_container_instances_operator.AzureContainerInstancesOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.azure_container_instances_operator.AzureContainerInstancesOperator</span></code></a></dt><dd><p>Start/Monitor a new ACI.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/hooks/azure_container_instance_hook/index.html#airflow.contrib.hooks.azure_container_instance_hook.AzureContainerInstanceHook" title="airflow.contrib.hooks.azure_container_instance_hook.AzureContainerInstanceHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.azure_container_instance_hook.AzureContainerInstanceHook</span></code></a></dt><dd><p>Wrapper around a single ACI.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/hooks/azure_container_registry_hook/index.html#airflow.contrib.hooks.azure_container_registry_hook.AzureContainerRegistryHook" title="airflow.contrib.hooks.azure_container_registry_hook.AzureContainerRegistryHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.azure_container_registry_hook.AzureContainerRegistryHook</span></code></a></dt><dd><p>Interface with ACR</p>
</dd>
</dl>
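<p>A rough sketch of running a container group with the operator; the connection ids, resource group, container name, image and region below are placeholder values chosen for illustration, not defaults provided by the operator itself:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>
from airflow import DAG
from airflow.contrib.operators.azure_container_instances_operator import AzureContainerInstancesOperator
from airflow.utils.dates import days_ago

dag = DAG("aci_example", schedule_interval=None, start_date=days_ago(1))

# Run a container image in a new Azure Container Instance and monitor it until it exits.
run_container = AzureContainerInstancesOperator(
    task_id="run_container",
    ci_conn_id="azure_container_instances_default",  # assumed connection name
    registry_conn_id=None,  # set to a registry connection for private images
    resource_group="my-resource-group",
    name="airflow-task-container",
    image="python:3.6-slim",
    region="WestEurope",
    dag=dag,
)
</pre></div></div>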
</div>
</div>
<div class="section" id="aws-amazon-web-services">
<span id="aws"></span><h2>AWS: Amazon Web Services<a class="headerlink" href="#aws-amazon-web-services" title="Permalink to this headline"></a></h2>
<p>Airflow has extensive support for Amazon Web Services, but note that the Hooks, Sensors and
Operators are in the contrib section.</p>
<div class="section" id="aws-emr">
<h3>AWS EMR<a class="headerlink" href="#aws-emr" title="Permalink to this headline"></a></h3>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/hooks/emr_hook/index.html#airflow.contrib.hooks.emr_hook.EmrHook" title="airflow.contrib.hooks.emr_hook.EmrHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.emr_hook.EmrHook</span></code></a></dt><dd><p>Interface with AWS EMR.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/emr_add_steps_operator/index.html#airflow.contrib.operators.emr_add_steps_operator.EmrAddStepsOperator" title="airflow.contrib.operators.emr_add_steps_operator.EmrAddStepsOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.emr_add_steps_operator.EmrAddStepsOperator</span></code></a></dt><dd><p>Adds steps to an existing EMR JobFlow.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/emr_create_job_flow_operator/index.html#airflow.contrib.operators.emr_create_job_flow_operator.EmrCreateJobFlowOperator" title="airflow.contrib.operators.emr_create_job_flow_operator.EmrCreateJobFlowOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.emr_create_job_flow_operator.EmrCreateJobFlowOperator</span></code></a></dt><dd><p>Creates an EMR JobFlow, reading the config from the EMR connection.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/emr_terminate_job_flow_operator/index.html#airflow.contrib.operators.emr_terminate_job_flow_operator.EmrTerminateJobFlowOperator" title="airflow.contrib.operators.emr_terminate_job_flow_operator.EmrTerminateJobFlowOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.emr_terminate_job_flow_operator.EmrTerminateJobFlowOperator</span></code></a></dt><dd><p>Terminates an EMR JobFlow.</p>
</dd>
</dl>
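<p>A minimal sketch of creating a JobFlow and adding a step to it; the job flow overrides and step definition are placeholder values, and the <cite>aws_default</cite> and <cite>emr_default</cite> connections are assumed:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>
from airflow import DAG
from airflow.contrib.operators.emr_create_job_flow_operator import EmrCreateJobFlowOperator
from airflow.contrib.operators.emr_add_steps_operator import EmrAddStepsOperator
from airflow.utils.dates import days_ago

dag = DAG("emr_example", schedule_interval=None, start_date=days_ago(1))

# Create the JobFlow; settings not given here fall back to the config on the "emr_default" connection.
create_cluster = EmrCreateJobFlowOperator(
    task_id="create_job_flow",
    job_flow_overrides={"Name": "airflow-emr-example"},
    aws_conn_id="aws_default",
    emr_conn_id="emr_default",
    dag=dag,
)

# Add a step to the cluster created above, using the job flow id pushed to XCom.
add_steps = EmrAddStepsOperator(
    task_id="add_steps",
    job_flow_id="{{ task_instance.xcom_pull(task_ids='create_job_flow', key='return_value') }}",
    aws_conn_id="aws_default",
    steps=[{
        "Name": "example_step",
        "ActionOnFailure": "CONTINUE",
        "HadoopJarStep": {"Jar": "command-runner.jar", "Args": ["spark-submit", "job.py"]},
    }],
    dag=dag,
)

create_cluster.set_downstream(add_steps)
</pre></div></div>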
</div>
<div class="section" id="aws-s3">
<h3>AWS S3<a class="headerlink" href="#aws-s3" title="Permalink to this headline"></a></h3>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/hooks/S3_hook/index.html#airflow.hooks.S3_hook.S3Hook" title="airflow.hooks.S3_hook.S3Hook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.hooks.S3_hook.S3Hook</span></code></a></dt><dd><p>Interface with AWS S3.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/operators/s3_file_transform_operator/index.html#airflow.operators.s3_file_transform_operator.S3FileTransformOperator" title="airflow.operators.s3_file_transform_operator.S3FileTransformOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.operators.s3_file_transform_operator.S3FileTransformOperator</span></code></a></dt><dd><p>Copies data from a source S3 location to a temporary location on the local filesystem.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/s3_list_operator/index.html#airflow.contrib.operators.s3_list_operator.S3ListOperator" title="airflow.contrib.operators.s3_list_operator.S3ListOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.s3_list_operator.S3ListOperator</span></code></a></dt><dd><p>Lists the files matching a key prefix from a S3 location.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/s3_to_gcs_operator/index.html#airflow.contrib.operators.s3_to_gcs_operator.S3ToGoogleCloudStorageOperator" title="airflow.contrib.operators.s3_to_gcs_operator.S3ToGoogleCloudStorageOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.s3_to_gcs_operator.S3ToGoogleCloudStorageOperator</span></code></a></dt><dd><p>Syncs an S3 location with a Google Cloud Storage bucket.</p>
</dd>
<dt><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.s3_to_gcs_transfer_operator.S3ToGoogleCloudStorageTransferOperator</span></code></dt><dd><p>Syncs an S3 bucket with a Google Cloud Storage bucket using the GCP Storage Transfer Service.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/operators/s3_to_hive_operator/index.html#airflow.operators.s3_to_hive_operator.S3ToHiveTransfer" title="airflow.operators.s3_to_hive_operator.S3ToHiveTransfer"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.operators.s3_to_hive_operator.S3ToHiveTransfer</span></code></a></dt><dd><p>Moves data from S3 to Hive. The operator downloads a file from S3, stores the file locally before loading it into a Hive table.</p>
</dd>
</dl>
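<p>A short sketch of using the hook directly; the bucket name, prefix and local file are placeholders and the <cite>aws_default</cite> connection is assumed:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>
from airflow.hooks.S3_hook import S3Hook

def archive_keys():
    # The hook reads AWS credentials from the "aws_default" connection.
    hook = S3Hook(aws_conn_id="aws_default")
    # List keys under a prefix and upload a local file alongside them.
    keys = hook.list_keys(bucket_name="my-bucket", prefix="raw/")
    hook.load_file(
        filename="/tmp/report.csv",
        key="raw/report.csv",
        bucket_name="my-bucket",
        replace=True,
    )
    return keys
</pre></div></div>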
</div>
<div class="section" id="aws-batch-service">
<h3>AWS Batch Service<a class="headerlink" href="#aws-batch-service" title="Permalink to this headline"></a></h3>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/operators/awsbatch_operator/index.html#airflow.contrib.operators.awsbatch_operator.AWSBatchOperator" title="airflow.contrib.operators.awsbatch_operator.AWSBatchOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.awsbatch_operator.AWSBatchOperator</span></code></a></dt><dd><p>Execute a task on AWS Batch Service.</p>
</dd>
</dl>
</div>
<div class="section" id="aws-redshift">
<h3>AWS RedShift<a class="headerlink" href="#aws-redshift" title="Permalink to this headline"></a></h3>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/sensors/aws_redshift_cluster_sensor/index.html#airflow.contrib.sensors.aws_redshift_cluster_sensor.AwsRedshiftClusterSensor" title="airflow.contrib.sensors.aws_redshift_cluster_sensor.AwsRedshiftClusterSensor"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.sensors.aws_redshift_cluster_sensor.AwsRedshiftClusterSensor</span></code></a></dt><dd><p>Waits for a Redshift cluster to reach a specific status.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/hooks/redshift_hook/index.html#airflow.contrib.hooks.redshift_hook.RedshiftHook" title="airflow.contrib.hooks.redshift_hook.RedshiftHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.redshift_hook.RedshiftHook</span></code></a></dt><dd><p>Interact with AWS Redshift, using the boto3 library.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/operators/redshift_to_s3_operator/index.html#airflow.operators.redshift_to_s3_operator.RedshiftToS3Transfer" title="airflow.operators.redshift_to_s3_operator.RedshiftToS3Transfer"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.operators.redshift_to_s3_operator.RedshiftToS3Transfer</span></code></a></dt><dd><p>Executes an unload command to S3 as CSV with or without headers.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/operators/s3_to_redshift_operator/index.html#airflow.operators.s3_to_redshift_operator.S3ToRedshiftTransfer" title="airflow.operators.s3_to_redshift_operator.S3ToRedshiftTransfer"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.operators.s3_to_redshift_operator.S3ToRedshiftTransfer</span></code></a></dt><dd><p>Executes an copy command from S3 as CSV with or without headers.</p>
</dd>
</dl>
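<p>A minimal sketch of unloading a table to S3 with the transfer operator; the schema, table, bucket and key are placeholders, and the <cite>redshift_default</cite> and <cite>aws_default</cite> connections are assumed:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>
from airflow import DAG
from airflow.operators.redshift_to_s3_operator import RedshiftToS3Transfer
from airflow.utils.dates import days_ago

dag = DAG("redshift_unload_example", schedule_interval=None, start_date=days_ago(1))

# Unload the public.orders table to CSV files under s3://my-bucket/exports/orders.
unload = RedshiftToS3Transfer(
    task_id="unload_orders",
    schema="public",
    table="orders",
    s3_bucket="my-bucket",
    s3_key="exports/orders",
    redshift_conn_id="redshift_default",
    aws_conn_id="aws_default",
    dag=dag,
)
</pre></div></div>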
</div>
<div class="section" id="aws-dynamodb">
<h3>AWS DynamoDB<a class="headerlink" href="#aws-dynamodb" title="Permalink to this headline"></a></h3>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/operators/hive_to_dynamodb/index.html#airflow.contrib.operators.hive_to_dynamodb.HiveToDynamoDBTransferOperator" title="airflow.contrib.operators.hive_to_dynamodb.HiveToDynamoDBTransferOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.hive_to_dynamodb.HiveToDynamoDBTransferOperator</span></code></a></dt><dd><p>Moves data from Hive to DynamoDB.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/hooks/aws_dynamodb_hook/index.html#airflow.contrib.hooks.aws_dynamodb_hook.AwsDynamoDBHook" title="airflow.contrib.hooks.aws_dynamodb_hook.AwsDynamoDBHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.aws_dynamodb_hook.AwsDynamoDBHook</span></code></a></dt><dd><p>Interface with AWS DynamoDB.</p>
</dd>
</dl>
</div>
<div class="section" id="aws-lambda">
<h3>AWS Lambda<a class="headerlink" href="#aws-lambda" title="Permalink to this headline"></a></h3>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/hooks/aws_lambda_hook/index.html#airflow.contrib.hooks.aws_lambda_hook.AwsLambdaHook" title="airflow.contrib.hooks.aws_lambda_hook.AwsLambdaHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.aws_lambda_hook.AwsLambdaHook</span></code></a></dt><dd><p>Interface with AWS Lambda.</p>
</dd>
</dl>
</div>
<div class="section" id="aws-kinesis">
<h3>AWS Kinesis<a class="headerlink" href="#aws-kinesis" title="Permalink to this headline"></a></h3>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/hooks/aws_firehose_hook/index.html#airflow.contrib.hooks.aws_firehose_hook.AwsFirehoseHook" title="airflow.contrib.hooks.aws_firehose_hook.AwsFirehoseHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.aws_firehose_hook.AwsFirehoseHook</span></code></a></dt><dd><p>Interface with AWS Kinesis Firehose.</p>
</dd>
</dl>
</div>
<div class="section" id="amazon-sagemaker">
<h3>Amazon SageMaker<a class="headerlink" href="#amazon-sagemaker" title="Permalink to this headline"></a></h3>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/hooks/sagemaker_hook/index.html#airflow.contrib.hooks.sagemaker_hook.SageMakerHook" title="airflow.contrib.hooks.sagemaker_hook.SageMakerHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.sagemaker_hook.SageMakerHook</span></code></a></dt><dd><p>Interface with Amazon SageMaker.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/sagemaker_training_operator/index.html#airflow.contrib.operators.sagemaker_training_operator.SageMakerTrainingOperator" title="airflow.contrib.operators.sagemaker_training_operator.SageMakerTrainingOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.sagemaker_training_operator.SageMakerTrainingOperator</span></code></a></dt><dd><p>Create a SageMaker training job.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/sagemaker_tuning_operator/index.html#airflow.contrib.operators.sagemaker_tuning_operator.SageMakerTuningOperator" title="airflow.contrib.operators.sagemaker_tuning_operator.SageMakerTuningOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.sagemaker_tuning_operator.SageMakerTuningOperator</span></code></a></dt><dd><p>Create a SageMaker tuning job.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/sagemaker_model_operator/index.html#airflow.contrib.operators.sagemaker_model_operator.SageMakerModelOperator" title="airflow.contrib.operators.sagemaker_model_operator.SageMakerModelOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.sagemaker_model_operator.SageMakerModelOperator</span></code></a></dt><dd><p>Create a SageMaker model.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/sagemaker_transform_operator/index.html#airflow.contrib.operators.sagemaker_transform_operator.SageMakerTransformOperator" title="airflow.contrib.operators.sagemaker_transform_operator.SageMakerTransformOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.sagemaker_transform_operator.SageMakerTransformOperator</span></code></a></dt><dd><p>Create a SageMaker transform job.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/sagemaker_endpoint_config_operator/index.html#airflow.contrib.operators.sagemaker_endpoint_config_operator.SageMakerEndpointConfigOperator" title="airflow.contrib.operators.sagemaker_endpoint_config_operator.SageMakerEndpointConfigOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.sagemaker_endpoint_config_operator.SageMakerEndpointConfigOperator</span></code></a></dt><dd><p>Create a SageMaker endpoint config.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/sagemaker_endpoint_operator/index.html#airflow.contrib.operators.sagemaker_endpoint_operator.SageMakerEndpointOperator" title="airflow.contrib.operators.sagemaker_endpoint_operator.SageMakerEndpointOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.sagemaker_endpoint_operator.SageMakerEndpointOperator</span></code></a></dt><dd><p>Create a SageMaker endpoint.</p>
</dd>
</dl>
</div>
</div>
<div class="section" id="databricks">
<span id="id1"></span><h2>Databricks<a class="headerlink" href="#databricks" title="Permalink to this headline"></a></h2>
<p><a class="reference external" href="https://databricks.com/">Databricks</a> has contributed an Airflow operator which enables
submitting runs to the Databricks platform. Internally the operator talks to the
<code class="docutils literal notranslate"><span class="pre">api/2.0/jobs/runs/submit</span></code> <a class="reference external" href="https://docs.databricks.com/api/latest/jobs.html#runs-submit">endpoint</a>.</p>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/operators/databricks_operator/index.html#airflow.contrib.operators.databricks_operator.DatabricksSubmitRunOperator" title="airflow.contrib.operators.databricks_operator.DatabricksSubmitRunOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.databricks_operator.DatabricksSubmitRunOperator</span></code></a></dt><dd><p>Submits a Spark job run to Databricks using the
<a class="reference external" href="https://docs.databricks.com/api/latest/jobs.html#runs-submit">api/2.0/jobs/runs/submit</a>
API endpoint.</p>
</dd>
</dl>
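<p>A minimal sketch of submitting a notebook run; the cluster spec and notebook path are placeholder values and the <cite>databricks_default</cite> connection is assumed:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>
from airflow import DAG
from airflow.contrib.operators.databricks_operator import DatabricksSubmitRunOperator
from airflow.utils.dates import days_ago

dag = DAG("databricks_example", schedule_interval=None, start_date=days_ago(1))

# The json payload is passed straight through to the api/2.0/jobs/runs/submit endpoint.
notebook_run = DatabricksSubmitRunOperator(
    task_id="notebook_run",
    databricks_conn_id="databricks_default",
    json={
        "new_cluster": {"spark_version": "2.1.0-db3-scala2.11", "num_workers": 2},
        "notebook_task": {"notebook_path": "/Users/airflow@example.com/PrepareData"},
    },
    dag=dag,
)
</pre></div></div>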
</div>
<div class="section" id="gcp-google-cloud-platform">
<span id="gcp"></span><h2>GCP: Google Cloud Platform<a class="headerlink" href="#gcp-google-cloud-platform" title="Permalink to this headline"></a></h2>
<p>Airflow has extensive support for the Google Cloud Platform, but note that most Hooks and
Operators are in the contrib section, meaning that they have a <em>beta</em> status and
can have breaking changes between minor releases.</p>
<p>See the <a class="reference internal" href="howto/connection/gcp.html"><span class="doc">GCP connection type</span></a> documentation to
configure connections to GCP.</p>
<div class="section" id="id2">
<h3>Logging<a class="headerlink" href="#id2" title="Permalink to this headline"></a></h3>
<p>Airflow can be configured to read and write task logs in Google Cloud Storage.
See <a class="reference internal" href="howto/write-logs.html#write-logs-gcp"><span class="std std-ref">Writing Logs to Google Cloud Storage</span></a>.</p>
</div>
<div class="section" id="googlecloudbasehook">
<h3>GoogleCloudBaseHook<a class="headerlink" href="#googlecloudbasehook" title="Permalink to this headline"></a></h3>
<p>All hooks are based on <a class="reference internal" href="_api/airflow/contrib/hooks/gcp_api_base_hook/index.html#airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook" title="airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook</span></code></a>.</p>
</div>
<div class="section" id="bigquery">
<h3>BigQuery<a class="headerlink" href="#bigquery" title="Permalink to this headline"></a></h3>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/operators/bigquery_check_operator/index.html#airflow.contrib.operators.bigquery_check_operator.BigQueryCheckOperator" title="airflow.contrib.operators.bigquery_check_operator.BigQueryCheckOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.bigquery_check_operator.BigQueryCheckOperator</span></code></a></dt><dd><p>Performs checks against a SQL query that will return a single row with different values.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/bigquery_check_operator/index.html#airflow.contrib.operators.bigquery_check_operator.BigQueryIntervalCheckOperator" title="airflow.contrib.operators.bigquery_check_operator.BigQueryIntervalCheckOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.bigquery_check_operator.BigQueryIntervalCheckOperator</span></code></a></dt><dd><p>Checks that the values of metrics given as SQL expressions are within a certain tolerance of the ones from days_back before.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/bigquery_check_operator/index.html#airflow.contrib.operators.bigquery_check_operator.BigQueryValueCheckOperator" title="airflow.contrib.operators.bigquery_check_operator.BigQueryValueCheckOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.bigquery_check_operator.BigQueryValueCheckOperator</span></code></a></dt><dd><p>Performs a simple value check using SQL code.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/bigquery_get_data/index.html#airflow.contrib.operators.bigquery_get_data.BigQueryGetDataOperator" title="airflow.contrib.operators.bigquery_get_data.BigQueryGetDataOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.bigquery_get_data.BigQueryGetDataOperator</span></code></a></dt><dd><p>Fetches the data from a BigQuery table and returns data in a python list</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/bigquery_operator/index.html#airflow.contrib.operators.bigquery_operator.BigQueryCreateEmptyDatasetOperator" title="airflow.contrib.operators.bigquery_operator.BigQueryCreateEmptyDatasetOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.bigquery_operator.BigQueryCreateEmptyDatasetOperator</span></code></a></dt><dd><p>Creates an empty BigQuery dataset.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/bigquery_operator/index.html#airflow.contrib.operators.bigquery_operator.BigQueryCreateEmptyTableOperator" title="airflow.contrib.operators.bigquery_operator.BigQueryCreateEmptyTableOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.bigquery_operator.BigQueryCreateEmptyTableOperator</span></code></a></dt><dd><p>Creates a new, empty table in the specified BigQuery dataset optionally with schema.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/bigquery_operator/index.html#airflow.contrib.operators.bigquery_operator.BigQueryCreateExternalTableOperator" title="airflow.contrib.operators.bigquery_operator.BigQueryCreateExternalTableOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.bigquery_operator.BigQueryCreateExternalTableOperator</span></code></a></dt><dd><p>Creates a new, external table in the dataset with the data in Google Cloud Storage.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/bigquery_operator/index.html#airflow.contrib.operators.bigquery_operator.BigQueryDeleteDatasetOperator" title="airflow.contrib.operators.bigquery_operator.BigQueryDeleteDatasetOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.bigquery_operator.BigQueryDeleteDatasetOperator</span></code></a></dt><dd><p>Deletes an existing BigQuery dataset.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/bigquery_operator/index.html#airflow.contrib.operators.bigquery_operator.BigQueryOperator" title="airflow.contrib.operators.bigquery_operator.BigQueryOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.bigquery_operator.BigQueryOperator</span></code></a></dt><dd><p>Executes BigQuery SQL queries in a specific BigQuery database.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/bigquery_table_delete_operator/index.html#airflow.contrib.operators.bigquery_table_delete_operator.BigQueryTableDeleteOperator" title="airflow.contrib.operators.bigquery_table_delete_operator.BigQueryTableDeleteOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.bigquery_table_delete_operator.BigQueryTableDeleteOperator</span></code></a></dt><dd><p>Deletes an existing BigQuery table.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/bigquery_to_bigquery/index.html#airflow.contrib.operators.bigquery_to_bigquery.BigQueryToBigQueryOperator" title="airflow.contrib.operators.bigquery_to_bigquery.BigQueryToBigQueryOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.bigquery_to_bigquery.BigQueryToBigQueryOperator</span></code></a></dt><dd><p>Copy a BigQuery table to another BigQuery table.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/bigquery_to_gcs/index.html#airflow.contrib.operators.bigquery_to_gcs.BigQueryToCloudStorageOperator" title="airflow.contrib.operators.bigquery_to_gcs.BigQueryToCloudStorageOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.bigquery_to_gcs.BigQueryToCloudStorageOperator</span></code></a></dt><dd><p>Transfers a BigQuery table to a Google Cloud Storage bucket</p>
</dd>
</dl>
<p>They also use <a class="reference internal" href="_api/airflow/contrib/hooks/bigquery_hook/index.html#airflow.contrib.hooks.bigquery_hook.BigQueryHook" title="airflow.contrib.hooks.bigquery_hook.BigQueryHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.bigquery_hook.BigQueryHook</span></code></a> to communicate with Google Cloud Platform.</p>
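<p>A minimal sketch of running a query with <cite>BigQueryOperator</cite>; the project, dataset and table names are placeholders and the <cite>bigquery_default</cite> connection is assumed:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>
from airflow import DAG
from airflow.contrib.operators.bigquery_operator import BigQueryOperator
from airflow.utils.dates import days_ago

dag = DAG("bigquery_example", schedule_interval=None, start_date=days_ago(1))

# Run a standard-SQL query and write the result into a destination table.
aggregate = BigQueryOperator(
    task_id="aggregate_events",
    sql="SELECT user_id, COUNT(1) AS events FROM `my_project.my_dataset.events` GROUP BY user_id",
    destination_dataset_table="my_project.my_dataset.events_per_user",
    write_disposition="WRITE_TRUNCATE",
    use_legacy_sql=False,
    bigquery_conn_id="bigquery_default",
    dag=dag,
)
</pre></div></div>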
</div>
<div class="section" id="cloud-spanner">
<h3>Cloud Spanner<a class="headerlink" href="#cloud-spanner" title="Permalink to this headline"></a></h3>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_spanner_operator/index.html#airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeleteOperator" title="airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeleteOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeleteOperator</span></code></a></dt><dd><p>deletes an existing database from a Google Cloud Spanner instance or returns success if the database is missing.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_spanner_operator/index.html#airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeployOperator" title="airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeployOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeployOperator</span></code></a></dt><dd><p>creates a new database in a Google Cloud instance or returns success if the database already exists.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_spanner_operator/index.html#airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseQueryOperator" title="airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseQueryOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseQueryOperator</span></code></a></dt><dd><p>executes an arbitrary DML query (INSERT, UPDATE, DELETE).</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_spanner_operator/index.html#airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseUpdateOperator" title="airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseUpdateOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseUpdateOperator</span></code></a></dt><dd><p>updates the structure of a Google Cloud Spanner database.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_spanner_operator/index.html#airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeleteOperator" title="airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeleteOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeleteOperator</span></code></a></dt><dd><p>deletes a Google Cloud Spanner instance.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_spanner_operator/index.html#airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeployOperator" title="airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeployOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeployOperator</span></code></a></dt><dd><p>creates a new Google Cloud Spanner instance, or if an instance with the same name exists, updates the instance.</p>
</dd>
</dl>
<p>They also use <a class="reference internal" href="_api/airflow/contrib/hooks/gcp_spanner_hook/index.html#airflow.contrib.hooks.gcp_spanner_hook.CloudSpannerHook" title="airflow.contrib.hooks.gcp_spanner_hook.CloudSpannerHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.gcp_spanner_hook.CloudSpannerHook</span></code></a> to communicate with Google Cloud Platform.</p>
</div>
<div class="section" id="cloud-sql">
<h3>Cloud SQL<a class="headerlink" href="#cloud-sql" title="Permalink to this headline"></a></h3>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_sql_operator/index.html#airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceCreateOperator" title="airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceCreateOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceCreateOperator</span></code></a></dt><dd><p>create a new Cloud SQL instance.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_sql_operator/index.html#airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseCreateOperator" title="airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseCreateOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseCreateOperator</span></code></a></dt><dd><p>creates a new database inside a Cloud SQL instance.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_sql_operator/index.html#airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseDeleteOperator" title="airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseDeleteOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseDeleteOperator</span></code></a></dt><dd><p>deletes a database from a Cloud SQL instance.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_sql_operator/index.html#airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabasePatchOperator" title="airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabasePatchOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabasePatchOperator</span></code></a></dt><dd><p>updates a database inside a Cloud SQL instance.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_sql_operator/index.html#airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDeleteOperator" title="airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDeleteOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDeleteOperator</span></code></a></dt><dd><p>delete a Cloud SQL instance.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_sql_operator/index.html#airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceExportOperator" title="airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceExportOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceExportOperator</span></code></a></dt><dd><p>exports data from a Cloud SQL instance.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_sql_operator/index.html#airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceImportOperator" title="airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceImportOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceImportOperator</span></code></a></dt><dd><p>imports data into a Cloud SQL instance.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_sql_operator/index.html#airflow.contrib.operators.gcp_sql_operator.CloudSqlInstancePatchOperator" title="airflow.contrib.operators.gcp_sql_operator.CloudSqlInstancePatchOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_sql_operator.CloudSqlInstancePatchOperator</span></code></a></dt><dd><p>patch a Cloud SQL instance.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_sql_operator/index.html#airflow.contrib.operators.gcp_sql_operator.CloudSqlQueryOperator" title="airflow.contrib.operators.gcp_sql_operator.CloudSqlQueryOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_sql_operator.CloudSqlQueryOperator</span></code></a></dt><dd><p>run query in a Cloud SQL instance.</p>
</dd>
</dl>
<p>They also use <a class="reference internal" href="_api/airflow/contrib/hooks/gcp_sql_hook/index.html#airflow.contrib.hooks.gcp_sql_hook.CloudSqlDatabaseHook" title="airflow.contrib.hooks.gcp_sql_hook.CloudSqlDatabaseHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.gcp_sql_hook.CloudSqlDatabaseHook</span></code></a> and <a class="reference internal" href="_api/airflow/contrib/hooks/gcp_sql_hook/index.html#airflow.contrib.hooks.gcp_sql_hook.CloudSqlHook" title="airflow.contrib.hooks.gcp_sql_hook.CloudSqlHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.gcp_sql_hook.CloudSqlHook</span></code></a> to communicate with Google Cloud Platform.</p>
</div>
<div class="section" id="cloud-bigtable">
<h3>Cloud Bigtable<a class="headerlink" href="#cloud-bigtable" title="Permalink to this headline"></a></h3>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_bigtable_operator/index.html#airflow.contrib.operators.gcp_bigtable_operator.BigtableClusterUpdateOperator" title="airflow.contrib.operators.gcp_bigtable_operator.BigtableClusterUpdateOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_bigtable_operator.BigtableClusterUpdateOperator</span></code></a></dt><dd><p>updates the number of nodes in a Google Cloud Bigtable cluster.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_bigtable_operator/index.html#airflow.contrib.operators.gcp_bigtable_operator.BigtableInstanceCreateOperator" title="airflow.contrib.operators.gcp_bigtable_operator.BigtableInstanceCreateOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_bigtable_operator.BigtableInstanceCreateOperator</span></code></a></dt><dd><p>creates a Cloud Bigtable instance.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_bigtable_operator/index.html#airflow.contrib.operators.gcp_bigtable_operator.BigtableInstanceDeleteOperator" title="airflow.contrib.operators.gcp_bigtable_operator.BigtableInstanceDeleteOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_bigtable_operator.BigtableInstanceDeleteOperator</span></code></a></dt><dd><p>deletes a Google Cloud Bigtable instance.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_bigtable_operator/index.html#airflow.contrib.operators.gcp_bigtable_operator.BigtableTableCreateOperator" title="airflow.contrib.operators.gcp_bigtable_operator.BigtableTableCreateOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_bigtable_operator.BigtableTableCreateOperator</span></code></a></dt><dd><p>creates a table in a Google Cloud Bigtable instance.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_bigtable_operator/index.html#airflow.contrib.operators.gcp_bigtable_operator.BigtableTableDeleteOperator" title="airflow.contrib.operators.gcp_bigtable_operator.BigtableTableDeleteOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_bigtable_operator.BigtableTableDeleteOperator</span></code></a></dt><dd><p>deletes a table in a Google Cloud Bigtable instance.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_bigtable_operator/index.html#airflow.contrib.operators.gcp_bigtable_operator.BigtableTableWaitForReplicationSensor" title="airflow.contrib.operators.gcp_bigtable_operator.BigtableTableWaitForReplicationSensor"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_bigtable_operator.BigtableTableWaitForReplicationSensor</span></code></a></dt><dd><p>(sensor) waits for a table to be fully replicated.</p>
</dd>
</dl>
<p>They also use <a class="reference internal" href="_api/airflow/contrib/hooks/gcp_bigtable_hook/index.html#airflow.contrib.hooks.gcp_bigtable_hook.BigtableHook" title="airflow.contrib.hooks.gcp_bigtable_hook.BigtableHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.gcp_bigtable_hook.BigtableHook</span></code></a> to communicate with Google Cloud Platform.</p>
</div>
<div class="section" id="compute-engine">
<h3>Compute Engine<a class="headerlink" href="#compute-engine" title="Permalink to this headline"></a></h3>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_compute_operator/index.html#airflow.contrib.operators.gcp_compute_operator.GceInstanceStartOperator" title="airflow.contrib.operators.gcp_compute_operator.GceInstanceStartOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_compute_operator.GceInstanceStartOperator</span></code></a></dt><dd><p>start an existing Google Compute Engine instance.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_compute_operator/index.html#airflow.contrib.operators.gcp_compute_operator.GceInstanceStopOperator" title="airflow.contrib.operators.gcp_compute_operator.GceInstanceStopOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_compute_operator.GceInstanceStopOperator</span></code></a></dt><dd><p>stop an existing Google Compute Engine instance.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_compute_operator/index.html#airflow.contrib.operators.gcp_compute_operator.GceSetMachineTypeOperator" title="airflow.contrib.operators.gcp_compute_operator.GceSetMachineTypeOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_compute_operator.GceSetMachineTypeOperator</span></code></a></dt><dd><p>change the machine type for a stopped instance.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_compute_operator/index.html#airflow.contrib.operators.gcp_compute_operator.GceInstanceTemplateCopyOperator" title="airflow.contrib.operators.gcp_compute_operator.GceInstanceTemplateCopyOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_compute_operator.GceInstanceTemplateCopyOperator</span></code></a></dt><dd><p>copy the Instance Template, applying specified changes.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_compute_operator/index.html#airflow.contrib.operators.gcp_compute_operator.GceInstanceGroupManagerUpdateTemplateOperator" title="airflow.contrib.operators.gcp_compute_operator.GceInstanceGroupManagerUpdateTemplateOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_compute_operator.GceInstanceGroupManagerUpdateTemplateOperator</span></code></a></dt><dd><p>patch the Instance Group Manager, replacing source Instance Template URL with the destination one.</p>
</dd>
</dl>
<p>These operators share the common base operator <a class="reference internal" href="_api/airflow/contrib/operators/gcp_compute_operator/index.html#airflow.contrib.operators.gcp_compute_operator.GceBaseOperator" title="airflow.contrib.operators.gcp_compute_operator.GceBaseOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_compute_operator.GceBaseOperator</span></code></a>.</p>
<p>They also use <a class="reference internal" href="_api/airflow/contrib/hooks/gcp_compute_hook/index.html#airflow.contrib.hooks.gcp_compute_hook.GceHook" title="airflow.contrib.hooks.gcp_compute_hook.GceHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.gcp_compute_hook.GceHook</span></code></a> to communicate with Google Cloud Platform.</p>
</div>
<div class="section" id="cloud-functions">
<h3>Cloud Functions<a class="headerlink" href="#cloud-functions" title="Permalink to this headline"></a></h3>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_function_operator/index.html#airflow.contrib.operators.gcp_function_operator.GcfFunctionDeployOperator" title="airflow.contrib.operators.gcp_function_operator.GcfFunctionDeployOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_function_operator.GcfFunctionDeployOperator</span></code></a></dt><dd><p>deploy Google Cloud Function to Google Cloud Platform</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_function_operator/index.html#airflow.contrib.operators.gcp_function_operator.GcfFunctionDeleteOperator" title="airflow.contrib.operators.gcp_function_operator.GcfFunctionDeleteOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_function_operator.GcfFunctionDeleteOperator</span></code></a></dt><dd><p>delete Google Cloud Function in Google Cloud Platform</p>
</dd>
</dl>
<p>They also use <a class="reference internal" href="_api/airflow/contrib/hooks/gcp_function_hook/index.html#airflow.contrib.hooks.gcp_function_hook.GcfHook" title="airflow.contrib.hooks.gcp_function_hook.GcfHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.gcp_function_hook.GcfHook</span></code></a> to communicate with Google Cloud Platform.</p>
</div>
<div class="section" id="cloud-dataflow">
<h3>Cloud DataFlow<a class="headerlink" href="#cloud-dataflow" title="Permalink to this headline"></a></h3>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/operators/dataflow_operator/index.html#airflow.contrib.operators.dataflow_operator.DataFlowJavaOperator" title="airflow.contrib.operators.dataflow_operator.DataFlowJavaOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.dataflow_operator.DataFlowJavaOperator</span></code></a></dt><dd><p>launching Cloud Dataflow jobs written in Java.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/dataflow_operator/index.html#airflow.contrib.operators.dataflow_operator.DataflowTemplateOperator" title="airflow.contrib.operators.dataflow_operator.DataflowTemplateOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.dataflow_operator.DataflowTemplateOperator</span></code></a></dt><dd><p>launching a templated Cloud DataFlow batch job.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/dataflow_operator/index.html#airflow.contrib.operators.dataflow_operator.DataFlowPythonOperator" title="airflow.contrib.operators.dataflow_operator.DataFlowPythonOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.dataflow_operator.DataFlowPythonOperator</span></code></a></dt><dd><p>launching Cloud Dataflow jobs written in python.</p>
</dd>
</dl>
<p>They also use <a class="reference internal" href="_api/airflow/contrib/hooks/gcp_dataflow_hook/index.html#airflow.contrib.hooks.gcp_dataflow_hook.DataFlowHook" title="airflow.contrib.hooks.gcp_dataflow_hook.DataFlowHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.gcp_dataflow_hook.DataFlowHook</span></code></a> to communicate with Google Cloud Platform.</p>
</div>
<div class="section" id="cloud-dataproc">
<h3>Cloud DataProc<a class="headerlink" href="#cloud-dataproc" title="Permalink to this headline"></a></h3>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/operators/dataproc_operator/index.html#airflow.contrib.operators.dataproc_operator.DataprocClusterCreateOperator" title="airflow.contrib.operators.dataproc_operator.DataprocClusterCreateOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.dataproc_operator.DataprocClusterCreateOperator</span></code></a></dt><dd><p>Create a new cluster on Google Cloud Dataproc.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/dataproc_operator/index.html#airflow.contrib.operators.dataproc_operator.DataprocClusterDeleteOperator" title="airflow.contrib.operators.dataproc_operator.DataprocClusterDeleteOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.dataproc_operator.DataprocClusterDeleteOperator</span></code></a></dt><dd><p>Delete a cluster on Google Cloud Dataproc.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/dataproc_operator/index.html#airflow.contrib.operators.dataproc_operator.DataprocClusterScaleOperator" title="airflow.contrib.operators.dataproc_operator.DataprocClusterScaleOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.dataproc_operator.DataprocClusterScaleOperator</span></code></a></dt><dd><p>Scale up or down a cluster on Google Cloud Dataproc.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/dataproc_operator/index.html#airflow.contrib.operators.dataproc_operator.DataProcHadoopOperator" title="airflow.contrib.operators.dataproc_operator.DataProcHadoopOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.dataproc_operator.DataProcHadoopOperator</span></code></a></dt><dd><p>Start a Hadoop Job on a Cloud DataProc cluster.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/dataproc_operator/index.html#airflow.contrib.operators.dataproc_operator.DataProcHiveOperator" title="airflow.contrib.operators.dataproc_operator.DataProcHiveOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.dataproc_operator.DataProcHiveOperator</span></code></a></dt><dd><p>Start a Hive query Job on a Cloud DataProc cluster.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/dataproc_operator/index.html#airflow.contrib.operators.dataproc_operator.DataProcPigOperator" title="airflow.contrib.operators.dataproc_operator.DataProcPigOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.dataproc_operator.DataProcPigOperator</span></code></a></dt><dd><p>Start a Pig query Job on a Cloud DataProc cluster.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/dataproc_operator/index.html#airflow.contrib.operators.dataproc_operator.DataProcPySparkOperator" title="airflow.contrib.operators.dataproc_operator.DataProcPySparkOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.dataproc_operator.DataProcPySparkOperator</span></code></a></dt><dd><p>Start a PySpark Job on a Cloud DataProc cluster.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/dataproc_operator/index.html#airflow.contrib.operators.dataproc_operator.DataProcSparkOperator" title="airflow.contrib.operators.dataproc_operator.DataProcSparkOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.dataproc_operator.DataProcSparkOperator</span></code></a></dt><dd><p>Start a Spark Job on a Cloud DataProc cluster.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/dataproc_operator/index.html#airflow.contrib.operators.dataproc_operator.DataProcSparkSqlOperator" title="airflow.contrib.operators.dataproc_operator.DataProcSparkSqlOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.dataproc_operator.DataProcSparkSqlOperator</span></code></a></dt><dd><p>Start a Spark SQL query Job on a Cloud DataProc cluster.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/dataproc_operator/index.html#airflow.contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateInlineOperator" title="airflow.contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateInlineOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateInlineOperator</span></code></a></dt><dd><p>Instantiate a WorkflowTemplate Inline on Google Cloud Dataproc.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/dataproc_operator/index.html#airflow.contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateOperator" title="airflow.contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateOperator</span></code></a></dt><dd><p>Instantiate a WorkflowTemplate on Google Cloud Dataproc.</p>
</dd>
</dl>
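<p>For example, a typical Dataproc workflow chains cluster creation, a PySpark job and cluster deletion, as in the sketch below; the project, cluster name and job file are placeholder assumptions:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>import datetime

from airflow import DAG
from airflow.contrib.operators.dataproc_operator import (
    DataprocClusterCreateOperator,
    DataprocClusterDeleteOperator,
    DataProcPySparkOperator,
)

with DAG(
    dag_id="example_dataproc",                  # hypothetical DAG id
    start_date=datetime.datetime(2019, 1, 1),
    schedule_interval=None,
) as dag:
    create_cluster = DataprocClusterCreateOperator(
        task_id="create_cluster",
        project_id="my-project",                # hypothetical project
        cluster_name="my-cluster",
        num_workers=2,
        zone="europe-west1-b",
    )
    run_job = DataProcPySparkOperator(
        task_id="run_pyspark",
        main="gs://my-bucket/jobs/job.py",      # hypothetical job file
        cluster_name="my-cluster",
    )
    delete_cluster = DataprocClusterDeleteOperator(
        task_id="delete_cluster",
        project_id="my-project",
        cluster_name="my-cluster",
    )
    create_cluster &gt;&gt; run_job &gt;&gt; delete_cluster
</pre></div>
</div>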
</div>
<div class="section" id="cloud-datastore">
<h3>Cloud Datastore<a class="headerlink" href="#cloud-datastore" title="Permalink to this headline"></a></h3>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/operators/datastore_export_operator/index.html#airflow.contrib.operators.datastore_export_operator.DatastoreExportOperator" title="airflow.contrib.operators.datastore_export_operator.DatastoreExportOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.datastore_export_operator.DatastoreExportOperator</span></code></a></dt><dd><p>Export entities from Google Cloud Datastore to Cloud Storage.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/datastore_import_operator/index.html#airflow.contrib.operators.datastore_import_operator.DatastoreImportOperator" title="airflow.contrib.operators.datastore_import_operator.DatastoreImportOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.datastore_import_operator.DatastoreImportOperator</span></code></a></dt><dd><p>Import entities from Cloud Storage to Google Cloud Datastore.</p>
</dd>
</dl>
<p>They also use <a class="reference internal" href="_api/airflow/contrib/hooks/datastore_hook/index.html#airflow.contrib.hooks.datastore_hook.DatastoreHook" title="airflow.contrib.hooks.datastore_hook.DatastoreHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.datastore_hook.DatastoreHook</span></code></a> to communicate with Google Cloud Platform.</p>
</div>
<div class="section" id="cloud-ml-engine">
<h3>Cloud ML Engine<a class="headerlink" href="#cloud-ml-engine" title="Permalink to this headline"></a></h3>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/operators/mlengine_operator/index.html#airflow.contrib.operators.mlengine_operator.MLEngineBatchPredictionOperator" title="airflow.contrib.operators.mlengine_operator.MLEngineBatchPredictionOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.mlengine_operator.MLEngineBatchPredictionOperator</span></code></a></dt><dd><p>Start a Cloud ML Engine batch prediction job.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/mlengine_operator/index.html#airflow.contrib.operators.mlengine_operator.MLEngineModelOperator" title="airflow.contrib.operators.mlengine_operator.MLEngineModelOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.mlengine_operator.MLEngineModelOperator</span></code></a></dt><dd><p>Manages a Cloud ML Engine model.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/mlengine_operator/index.html#airflow.contrib.operators.mlengine_operator.MLEngineTrainingOperator" title="airflow.contrib.operators.mlengine_operator.MLEngineTrainingOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.mlengine_operator.MLEngineTrainingOperator</span></code></a></dt><dd><p>Start a Cloud ML Engine training job.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/mlengine_operator/index.html#airflow.contrib.operators.mlengine_operator.MLEngineVersionOperator" title="airflow.contrib.operators.mlengine_operator.MLEngineVersionOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.mlengine_operator.MLEngineVersionOperator</span></code></a></dt><dd><p>Manages a Cloud ML Engine model version.</p>
</dd>
</dl>
<p>They also use <a class="reference internal" href="_api/airflow/contrib/hooks/gcp_mlengine_hook/index.html#airflow.contrib.hooks.gcp_mlengine_hook.MLEngineHook" title="airflow.contrib.hooks.gcp_mlengine_hook.MLEngineHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.gcp_mlengine_hook.MLEngineHook</span></code></a> to communicate with Google Cloud Platform.</p>
</div>
<div class="section" id="cloud-storage">
<h3>Cloud Storage<a class="headerlink" href="#cloud-storage" title="Permalink to this headline"></a></h3>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/operators/file_to_gcs/index.html#airflow.contrib.operators.file_to_gcs.FileToGoogleCloudStorageOperator" title="airflow.contrib.operators.file_to_gcs.FileToGoogleCloudStorageOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.file_to_gcs.FileToGoogleCloudStorageOperator</span></code></a></dt><dd><p>Uploads a file to Google Cloud Storage.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcs_acl_operator/index.html#airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageBucketCreateAclEntryOperator" title="airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageBucketCreateAclEntryOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageBucketCreateAclEntryOperator</span></code></a></dt><dd><p>Creates a new ACL entry on the specified bucket.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcs_acl_operator/index.html#airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageObjectCreateAclEntryOperator" title="airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageObjectCreateAclEntryOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageObjectCreateAclEntryOperator</span></code></a></dt><dd><p>Creates a new ACL entry on the specified object.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcs_download_operator/index.html#airflow.contrib.operators.gcs_download_operator.GoogleCloudStorageDownloadOperator" title="airflow.contrib.operators.gcs_download_operator.GoogleCloudStorageDownloadOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcs_download_operator.GoogleCloudStorageDownloadOperator</span></code></a></dt><dd><p>Downloads a file from Google Cloud Storage.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcs_list_operator/index.html#airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageListOperator" title="airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageListOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageListOperator</span></code></a></dt><dd><p>List all objects from the bucket with the give string prefix and delimiter in name.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcs_operator/index.html#airflow.contrib.operators.gcs_operator.GoogleCloudStorageCreateBucketOperator" title="airflow.contrib.operators.gcs_operator.GoogleCloudStorageCreateBucketOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcs_operator.GoogleCloudStorageCreateBucketOperator</span></code></a></dt><dd><p>Creates a new cloud storage bucket.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcs_to_bq/index.html#airflow.contrib.operators.gcs_to_bq.GoogleCloudStorageToBigQueryOperator" title="airflow.contrib.operators.gcs_to_bq.GoogleCloudStorageToBigQueryOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcs_to_bq.GoogleCloudStorageToBigQueryOperator</span></code></a></dt><dd><p>Loads files from Google cloud storage into BigQuery.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcs_to_gcs/index.html#airflow.contrib.operators.gcs_to_gcs.GoogleCloudStorageToGoogleCloudStorageOperator" title="airflow.contrib.operators.gcs_to_gcs.GoogleCloudStorageToGoogleCloudStorageOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcs_to_gcs.GoogleCloudStorageToGoogleCloudStorageOperator</span></code></a></dt><dd><p>Copies objects from a bucket to another, with renaming if requested.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/mysql_to_gcs/index.html#airflow.contrib.operators.mysql_to_gcs.MySqlToGoogleCloudStorageOperator" title="airflow.contrib.operators.mysql_to_gcs.MySqlToGoogleCloudStorageOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.mysql_to_gcs.MySqlToGoogleCloudStorageOperator</span></code></a></dt><dd><p>Copy data from any MySQL Database to Google cloud storage in JSON format.</p>
</dd>
</dl>
<p>They also use <a class="reference internal" href="_api/airflow/contrib/hooks/gcs_hook/index.html#airflow.contrib.hooks.gcs_hook.GoogleCloudStorageHook" title="airflow.contrib.hooks.gcs_hook.GoogleCloudStorageHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.gcs_hook.GoogleCloudStorageHook</span></code></a> to communicate with Google Cloud Platform.</p>
</div>
<div class="section" id="transfer-service">
<h3>Transfer Service<a class="headerlink" href="#transfer-service" title="Permalink to this headline"></a></h3>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_transfer_operator/index.html#airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceJobDeleteOperator" title="airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceJobDeleteOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceJobDeleteOperator</span></code></a></dt><dd><p>Deletes a transfer job.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_transfer_operator/index.html#airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceJobCreateOperator" title="airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceJobCreateOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceJobCreateOperator</span></code></a></dt><dd><p>Creates a transfer job.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_transfer_operator/index.html#airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceJobUpdateOperator" title="airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceJobUpdateOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceJobUpdateOperator</span></code></a></dt><dd><p>Updates a transfer job.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_transfer_operator/index.html#airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationCancelOperator" title="airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationCancelOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationCancelOperator</span></code></a></dt><dd><p>Cancels a transfer operation.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_transfer_operator/index.html#airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationGetOperator" title="airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationGetOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationGetOperator</span></code></a></dt><dd><p>Gets a transfer operation.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_transfer_operator/index.html#airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationPauseOperator" title="airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationPauseOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationPauseOperator</span></code></a></dt><dd><p>Pauses a transfer operation</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_transfer_operator/index.html#airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationResumeOperator" title="airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationResumeOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationResumeOperator</span></code></a></dt><dd><p>Resumes a transfer operation.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_transfer_operator/index.html#airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationsListOperator" title="airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationsListOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationsListOperator</span></code></a></dt><dd><p>Gets a list of transfer operations.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_transfer_operator/index.html#airflow.contrib.operators.gcp_transfer_operator.GoogleCloudStorageToGoogleCloudStorageTransferOperator" title="airflow.contrib.operators.gcp_transfer_operator.GoogleCloudStorageToGoogleCloudStorageTransferOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_transfer_operator.GoogleCloudStorageToGoogleCloudStorageTransferOperator</span></code></a></dt><dd><p>Copies objects from a Google Cloud Storage bucket to another bucket.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_transfer_operator/index.html#airflow.contrib.operators.gcp_transfer_operator.S3ToGoogleCloudStorageTransferOperator" title="airflow.contrib.operators.gcp_transfer_operator.S3ToGoogleCloudStorageTransferOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_transfer_operator.S3ToGoogleCloudStorageTransferOperator</span></code></a></dt><dd><p>Synchronizes an S3 bucket with a Google Cloud Storage bucket.</p>
</dd>
<dt><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.sensors.gcp_transfer_operator.GCPTransferServiceWaitForJobStatusSensor</span></code></dt><dd><p>Waits for at least one operation belonging to the job to have the
expected status.</p>
</dd>
</dl>
<p>They also use <a class="reference internal" href="_api/airflow/contrib/hooks/gcp_transfer_hook/index.html#airflow.contrib.hooks.gcp_transfer_hook.GCPTransferServiceHook" title="airflow.contrib.hooks.gcp_transfer_hook.GCPTransferServiceHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.gcp_transfer_hook.GCPTransferServiceHook</span></code></a> to communicate with Google Cloud Platform.</p>
</div>
<div class="section" id="cloud-vision">
<h3>Cloud Vision<a class="headerlink" href="#cloud-vision" title="Permalink to this headline"></a></h3>
<div class="section" id="cloud-vision-product-search-operators">
<h4>Cloud Vision Product Search Operators<a class="headerlink" href="#cloud-vision-product-search-operators" title="Permalink to this headline"></a></h4>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_vision_operator/index.html#airflow.contrib.operators.gcp_vision_operator.CloudVisionAddProductToProductSetOperator" title="airflow.contrib.operators.gcp_vision_operator.CloudVisionAddProductToProductSetOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_vision_operator.CloudVisionAddProductToProductSetOperator</span></code></a></dt><dd><p>Adds a Product to the specified ProductSet.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_vision_operator/index.html#airflow.contrib.operators.gcp_vision_operator.CloudVisionAnnotateImageOperator" title="airflow.contrib.operators.gcp_vision_operator.CloudVisionAnnotateImageOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_vision_operator.CloudVisionAnnotateImageOperator</span></code></a></dt><dd><p>Run image detection and annotation for an image.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_vision_operator/index.html#airflow.contrib.operators.gcp_vision_operator.CloudVisionProductCreateOperator" title="airflow.contrib.operators.gcp_vision_operator.CloudVisionProductCreateOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_vision_operator.CloudVisionProductCreateOperator</span></code></a></dt><dd><p>Creates a new Product resource.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_vision_operator/index.html#airflow.contrib.operators.gcp_vision_operator.CloudVisionProductDeleteOperator" title="airflow.contrib.operators.gcp_vision_operator.CloudVisionProductDeleteOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_vision_operator.CloudVisionProductDeleteOperator</span></code></a></dt><dd><p>Permanently deletes a product and its reference images.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_vision_operator/index.html#airflow.contrib.operators.gcp_vision_operator.CloudVisionProductGetOperator" title="airflow.contrib.operators.gcp_vision_operator.CloudVisionProductGetOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_vision_operator.CloudVisionProductGetOperator</span></code></a></dt><dd><p>Gets information associated with a Product.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_vision_operator/index.html#airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetCreateOperator" title="airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetCreateOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetCreateOperator</span></code></a></dt><dd><p>Creates a new ProductSet resource.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_vision_operator/index.html#airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetDeleteOperator" title="airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetDeleteOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetDeleteOperator</span></code></a></dt><dd><p>Permanently deletes a ProductSet.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_vision_operator/index.html#airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetGetOperator" title="airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetGetOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetGetOperator</span></code></a></dt><dd><p>Gets information associated with a ProductSet.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_vision_operator/index.html#airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetUpdateOperator" title="airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetUpdateOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetUpdateOperator</span></code></a></dt><dd><p>Makes changes to a ProductSet resource.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_vision_operator/index.html#airflow.contrib.operators.gcp_vision_operator.CloudVisionProductUpdateOperator" title="airflow.contrib.operators.gcp_vision_operator.CloudVisionProductUpdateOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_vision_operator.CloudVisionProductUpdateOperator</span></code></a></dt><dd><p>Makes changes to a Product resource.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_vision_operator/index.html#airflow.contrib.operators.gcp_vision_operator.CloudVisionReferenceImageCreateOperator" title="airflow.contrib.operators.gcp_vision_operator.CloudVisionReferenceImageCreateOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_vision_operator.CloudVisionReferenceImageCreateOperator</span></code></a></dt><dd><p>Creates a new ReferenceImage resource.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_vision_operator/index.html#airflow.contrib.operators.gcp_vision_operator.CloudVisionRemoveProductFromProductSetOperator" title="airflow.contrib.operators.gcp_vision_operator.CloudVisionRemoveProductFromProductSetOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_vision_operator.CloudVisionRemoveProductFromProductSetOperator</span></code></a></dt><dd><p>Removes a Product from the specified ProductSet.</p>
</dd>
</dl>
<p>They also use <a class="reference internal" href="_api/airflow/contrib/hooks/gcp_vision_hook/index.html#airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook" title="airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook</span></code></a> to communicate with Google Cloud Platform.</p>
</div>
</div>
<div class="section" id="cloud-translate">
<h3>Cloud Translate<a class="headerlink" href="#cloud-translate" title="Permalink to this headline"></a></h3>
<div class="section" id="cloud-translate-text-operators">
<h4>Cloud Translate Text Operators<a class="headerlink" href="#cloud-translate-text-operators" title="Permalink to this headline"></a></h4>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_translate_operator/index.html#airflow.contrib.operators.gcp_translate_operator.CloudTranslateTextOperator" title="airflow.contrib.operators.gcp_translate_operator.CloudTranslateTextOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_translate_operator.CloudTranslateTextOperator</span></code></a></dt><dd><p>Translate a string or list of strings.</p>
</dd>
</dl>
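<p>A minimal translation sketch is shown below; the input strings and target language are placeholder assumptions, and the translation result is returned via XCom:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>import datetime

from airflow import DAG
from airflow.contrib.operators.gcp_translate_operator import CloudTranslateTextOperator

with DAG(
    dag_id="example_translate_text",            # hypothetical DAG id
    start_date=datetime.datetime(2019, 1, 1),
    schedule_interval=None,
) as dag:
    translate_text = CloudTranslateTextOperator(
        task_id="translate_text",
        values=["Hello world"],                 # placeholder input strings
        target_language="de",
        format_="text",
        source_language=None,
        model="base",
    )
</pre></div>
</div>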
</div>
</div>
<div class="section" id="google-kubernetes-engine">
<h3>Google Kubernetes Engine<a class="headerlink" href="#google-kubernetes-engine" title="Permalink to this headline"></a></h3>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_container_operator/index.html#airflow.contrib.operators.gcp_container_operator.GKEClusterCreateOperator" title="airflow.contrib.operators.gcp_container_operator.GKEClusterCreateOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_container_operator.GKEClusterCreateOperator</span></code></a></dt><dd><p>Creates a Kubernetes Cluster in Google Cloud Platform</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_container_operator/index.html#airflow.contrib.operators.gcp_container_operator.GKEClusterDeleteOperator" title="airflow.contrib.operators.gcp_container_operator.GKEClusterDeleteOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_container_operator.GKEClusterDeleteOperator</span></code></a></dt><dd><p>Deletes a Kubernetes Cluster in Google Cloud Platform</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_container_operator/index.html#airflow.contrib.operators.gcp_container_operator.GKEPodOperator" title="airflow.contrib.operators.gcp_container_operator.GKEPodOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_container_operator.GKEPodOperator</span></code></a></dt><dd><p>Executes a task in a Kubernetes pod in the specified Google Kubernetes Engine cluster</p>
</dd>
</dl>
<p>They also use <a class="reference internal" href="_api/airflow/contrib/hooks/gcp_container_hook/index.html#airflow.contrib.hooks.gcp_container_hook.GKEClusterHook" title="airflow.contrib.hooks.gcp_container_hook.GKEClusterHook"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.gcp_container_hook.GKEClusterHook</span></code></a> to communicate with Google Cloud Platform.</p>
</div>
<div class="section" id="google-natural-language">
<h3>Google Natural Language<a class="headerlink" href="#google-natural-language" title="Permalink to this headline"></a></h3>
<dl class="simple">
<dt><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_natural_language_operator.CloudLanguageAnalyzeEntities</span></code></dt><dd><p>Finds named entities (currently proper names and common nouns) in the text along with entity types,
salience, mentions for each entity, and other properties.</p>
</dd>
<dt><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_natural_language_operator.CloudLanguageAnalyzeEntitySentiment</span></code></dt><dd><p>Finds entities, similar to AnalyzeEntities in the text and analyzes sentiment associated with each
entity and its mentions.</p>
</dd>
<dt><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_natural_language_operator.CloudLanguageAnalyzeSentiment</span></code></dt><dd><p>Analyzes the sentiment of the provided text.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/gcp_natural_language_operator/index.html#airflow.contrib.operators.gcp_natural_language_operator.CloudLanguageClassifyTextOperator" title="airflow.contrib.operators.gcp_natural_language_operator.CloudLanguageClassifyTextOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.gcp_natural_language_operator.CloudLanguageClassifyTextOperator</span></code></a></dt><dd><p>Classifies a document into categories.</p>
</dd>
</dl>
<p>They also use <code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.hooks.gcp_natural_language_operator.CloudNaturalLanguageHook</span></code> to communicate with Google Cloud Platform.</p>
</div>
</div>
<div class="section" id="qubole">
<span id="id3"></span><h2>Qubole<a class="headerlink" href="#qubole" title="Permalink to this headline"></a></h2>
<p>Apache Airflow has native operators, sensors and hooks for <a class="reference external" href="https://qubole.com/">Qubole</a>,
which let you submit your big data jobs to Qubole directly from Apache Airflow.</p>
<dl class="simple">
<dt><a class="reference internal" href="_api/airflow/contrib/operators/qubole_operator/index.html#airflow.contrib.operators.qubole_operator.QuboleOperator" title="airflow.contrib.operators.qubole_operator.QuboleOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.qubole_operator.QuboleOperator</span></code></a></dt><dd><p>Execute tasks (commands) on QDS (<a class="reference external" href="https://qubole.com">https://qubole.com</a>).</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/sensors/qubole_sensor/index.html#airflow.contrib.sensors.qubole_sensor.QubolePartitionSensor" title="airflow.contrib.sensors.qubole_sensor.QubolePartitionSensor"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.sensors.qubole_sensor.QubolePartitionSensor</span></code></a></dt><dd><p>Wait for a Hive partition to show up in QHS (Qubole Hive Service)
and check for its presence via QDS APIs.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/sensors/qubole_sensor/index.html#airflow.contrib.sensors.qubole_sensor.QuboleFileSensor" title="airflow.contrib.sensors.qubole_sensor.QuboleFileSensor"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.sensors.qubole_sensor.QuboleFileSensor</span></code></a></dt><dd><p>Wait for a file or folder to be present in cloud storage
and check for its presence via QDS APIs.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/qubole_check_operator/index.html#airflow.contrib.operators.qubole_check_operator.QuboleCheckOperator" title="airflow.contrib.operators.qubole_check_operator.QuboleCheckOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.qubole_check_operator.QuboleCheckOperator</span></code></a></dt><dd><p>Performs checks against Qubole Commands. <code class="docutils literal notranslate"><span class="pre">QuboleCheckOperator</span></code> expects
a command that will be executed on QDS.</p>
</dd>
<dt><a class="reference internal" href="_api/airflow/contrib/operators/qubole_check_operator/index.html#airflow.contrib.operators.qubole_check_operator.QuboleValueCheckOperator" title="airflow.contrib.operators.qubole_check_operator.QuboleValueCheckOperator"><code class="xref py py-class docutils literal notranslate"><span class="pre">airflow.contrib.operators.qubole_check_operator.QuboleValueCheckOperator</span></code></a></dt><dd><p>Performs a simple value check using Qubole command.
By default, each value on the first row of this
Qubole command is compared with a pre-defined value</p>
</dd>
</dl>
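<p>For illustration, the sketch below submits a Hive command through <code class="docutils literal notranslate"><span class="pre">QuboleOperator</span></code>; the DAG id, query and cluster label are placeholder assumptions, and the default Qubole connection is assumed to be configured:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>import datetime

from airflow import DAG
from airflow.contrib.operators.qubole_operator import QuboleOperator

with DAG(
    dag_id="example_qubole_hive",               # hypothetical DAG id
    start_date=datetime.datetime(2019, 1, 1),
    schedule_interval=None,
) as dag:
    run_hive_query = QuboleOperator(
        task_id="run_hive_query",
        command_type="hivecmd",
        query="SHOW TABLES",                    # placeholder query
        cluster_label="default",
        qubole_conn_id="qubole_default",
    )
</pre></div>
</div>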
</div>
</div>
</div>
</div>
<footer>
<div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
<a href="metrics.html" class="btn btn-neutral float-right" title="Metrics" accesskey="n" rel="next">Next <span class="fa fa-arrow-circle-right"></span></a>
<a href="api.html" class="btn btn-neutral float-left" title="Experimental Rest API" accesskey="p" rel="prev"><span class="fa fa-arrow-circle-left"></span> Previous</a>
</div>
<hr/>
<div role="contentinfo">
<p>
</p>
</div>
Built with <a href="http://sphinx-doc.org/">Sphinx</a> using a <a href="https://github.com/rtfd/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
</footer>
</div>
</div>
</section>
</div>
<script type="text/javascript">
jQuery(function () {
SphinxRtdTheme.Navigation.enable(true);
});
</script>
</body>
</html>