| |
| |
| |
| |
| <!DOCTYPE html> |
| <!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]--> |
| <!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]--> |
| <head> |
| <meta charset="utf-8"> |
| |
| <meta name="viewport" content="width=device-width, initial-scale=1.0"> |
| |
| <title>airflow.hooks.hive_hooks — Airflow Documentation</title> |
| |
| |
| |
| |
| |
| |
| |
| |
| <script type="text/javascript" src="../../../_static/js/modernizr.min.js"></script> |
| |
| |
| <script type="text/javascript" id="documentation_options" data-url_root="../../../" src="../../../_static/documentation_options.js"></script> |
| <script type="text/javascript" src="../../../_static/jquery.js"></script> |
| <script type="text/javascript" src="../../../_static/underscore.js"></script> |
| <script type="text/javascript" src="../../../_static/doctools.js"></script> |
| <script type="text/javascript" src="../../../_static/language_data.js"></script> |
| |
| <script type="text/javascript" src="../../../_static/js/theme.js"></script> |
| |
| |
| |
| |
| <link rel="stylesheet" href="../../../_static/css/theme.css" type="text/css" /> |
| <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" /> |
| <link rel="index" title="Index" href="../../../genindex.html" /> |
| <link rel="search" title="Search" href="../../../search.html" /> |
| |
| <script> |
| document.addEventListener('DOMContentLoaded', function() { |
| var el = document.getElementById('changelog'); |
| if (el !== null ) { |
| // [AIRFLOW-...] |
| el.innerHTML = el.innerHTML.replace( |
| /\[(AIRFLOW-[\d]+)\]/g, |
| `<a href="https://issues.apache.org/jira/browse/$1">[$1]</a>` |
| ); |
| // (#...) |
| el.innerHTML = el.innerHTML.replace( |
| /\(#([\d]+)\)/g, |
| `<a href="https://github.com/apache/airflow/pull/$1">(#$1)</a>` |
| ); |
| }; |
| }) |
| </script> |
| <style> |
| .example-header { |
| position: relative; |
| background: #9AAA7A; |
| padding: 8px 16px; |
| margin-bottom: 0; |
| } |
| .example-header--with-button { |
| padding-right: 166px; |
| } |
| .example-header:after{ |
| content: ''; |
| display: table; |
| clear: both; |
| } |
| .example-title { |
| display:block; |
| padding: 4px; |
| margin-right: 16px; |
| color: white; |
| overflow-x: auto; |
| } |
| .example-header-button { |
| top: 8px; |
| right: 16px; |
| position: absolute; |
| } |
| .example-header + .highlight-python { |
| margin-top: 0 !important; |
| } |
| .viewcode-button { |
| display: inline-block; |
| padding: 8px 16px; |
| border: 0; |
| margin: 0; |
| outline: 0; |
| border-radius: 2px; |
| -webkit-box-shadow: 0 3px 5px 0 rgba(0,0,0,.3); |
| box-shadow: 0 3px 6px 0 rgba(0,0,0,.3); |
| color: #404040; |
| background-color: #e7e7e7; |
| cursor: pointer; |
| font-size: 16px; |
| font-weight: 500; |
| line-height: 1; |
| text-decoration: none; |
| text-overflow: ellipsis; |
| overflow: hidden; |
| text-transform: uppercase; |
| -webkit-transition: background-color .2s; |
| transition: background-color .2s; |
| vertical-align: middle; |
| white-space: nowrap; |
| } |
| .viewcode-button:visited { |
| color: #404040; |
| } |
| .viewcode-button:hover, .viewcode-button:focus { |
| color: #404040; |
| background-color: #d6d6d6; |
| } |
| </style> |
| |
| </head> |
| |
| <body class="wy-body-for-nav"> |
| |
| |
| <div class="wy-grid-for-nav"> |
| |
| <nav data-toggle="wy-nav-shift" class="wy-nav-side"> |
| <div class="wy-side-scroll"> |
| <div class="wy-side-nav-search" > |
| |
| |
| |
| <a href="../../../index.html" class="icon icon-home"> Airflow |
| |
| |
| |
| </a> |
| |
| |
| |
| |
| <div class="version"> |
| 1.10.3 |
| </div> |
| |
| |
| |
| |
<div role="search">
  <form id="rtd-search-form" class="wy-form" action="../../../search.html" method="get">
    <!-- aria-label added: placeholder alone is not an accessible label -->
    <input type="text" name="q" placeholder="Search docs" aria-label="Search docs">
    <input type="hidden" name="check_keywords" value="yes">
    <input type="hidden" name="area" value="default">
  </form>
</div>
| |
| |
| </div> |
| |
| <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation"> |
| |
| |
| |
| |
| |
| |
| <ul> |
| <li class="toctree-l1"><a class="reference internal" href="../../../project.html">Project</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../license.html">License</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../start.html">Quick Start</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../installation.html">Installation</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../tutorial.html">Tutorial</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../howto/index.html">How-to Guides</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../ui.html">UI / Screenshots</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../concepts.html">Concepts</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../profiling.html">Data Profiling</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../cli.html">Command Line Interface</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../scheduler.html">Scheduling & Triggers</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../plugins.html">Plugins</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../security.html">Security</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../timezone.html">Time zones</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../api.html">Experimental Rest API</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../integration.html">Integration</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../metrics.html">Metrics</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../kubernetes.html">Kubernetes</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../lineage.html">Lineage</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../changelog.html">Changelog</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../faq.html">FAQ</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../macros.html">Macros reference</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../_api/index.html">API Reference</a></li> |
| </ul> |
| |
| |
| |
| </div> |
| </div> |
| </nav> |
| |
| <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap"> |
| |
| |
| <nav class="wy-nav-top" aria-label="top navigation"> |
| |
| <i data-toggle="wy-nav-top" class="fa fa-bars"></i> |
| <a href="../../../index.html">Airflow</a> |
| |
| </nav> |
| |
| |
| <div class="wy-nav-content"> |
| |
| <div class="rst-content"> |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| <div role="navigation" aria-label="breadcrumbs navigation"> |
| |
| <ul class="wy-breadcrumbs"> |
| |
| <li><a href="../../../index.html">Docs</a> »</li> |
| |
| <li><a href="../../index.html">Module code</a> »</li> |
| |
| <li><a href="../hooks.html">airflow.hooks</a> »</li> |
| |
| <li>airflow.hooks.hive_hooks</li> |
| |
| |
| <li class="wy-breadcrumbs-aside"> |
| |
| </li> |
| |
| </ul> |
| |
| |
| <hr/> |
| </div> |
| <div role="main" class="document" itemscope="itemscope" itemtype="http://schema.org/Article"> |
| <div itemprop="articleBody"> |
| |
| <h1>Source code for airflow.hooks.hive_hooks</h1><div class="highlight"><pre> |
| <span></span><span class="c1"># -*- coding: utf-8 -*-</span> |
| <span class="c1">#</span> |
| <span class="c1"># Licensed to the Apache Software Foundation (ASF) under one</span> |
| <span class="c1"># or more contributor license agreements. See the NOTICE file</span> |
| <span class="c1"># distributed with this work for additional information</span> |
| <span class="c1"># regarding copyright ownership. The ASF licenses this file</span> |
| <span class="c1"># to you under the Apache License, Version 2.0 (the</span> |
| <span class="c1"># "License"); you may not use this file except in compliance</span> |
| <span class="c1"># with the License. You may obtain a copy of the License at</span> |
| <span class="c1">#</span> |
| <span class="c1"># http://www.apache.org/licenses/LICENSE-2.0</span> |
| <span class="c1">#</span> |
| <span class="c1"># Unless required by applicable law or agreed to in writing,</span> |
| <span class="c1"># software distributed under the License is distributed on an</span> |
| <span class="c1"># "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY</span> |
| <span class="c1"># KIND, either express or implied. See the License for the</span> |
| <span class="c1"># specific language governing permissions and limitations</span> |
| <span class="c1"># under the License.</span> |
| |
| <span class="kn">from</span> <span class="nn">__future__</span> <span class="k">import</span> <span class="n">print_function</span><span class="p">,</span> <span class="n">unicode_literals</span> |
| |
| <span class="kn">import</span> <span class="nn">contextlib</span> |
| <span class="kn">import</span> <span class="nn">os</span> |
| <span class="kn">import</span> <span class="nn">re</span> |
| <span class="kn">import</span> <span class="nn">subprocess</span> |
| <span class="kn">import</span> <span class="nn">time</span> |
| <span class="kn">from</span> <span class="nn">collections</span> <span class="k">import</span> <span class="n">OrderedDict</span> |
| <span class="kn">from</span> <span class="nn">tempfile</span> <span class="k">import</span> <span class="n">NamedTemporaryFile</span> |
| |
| <span class="kn">import</span> <span class="nn">six</span> |
| <span class="kn">import</span> <span class="nn">unicodecsv</span> <span class="k">as</span> <span class="nn">csv</span> |
| <span class="kn">from</span> <span class="nn">past.builtins</span> <span class="k">import</span> <span class="n">basestring</span> |
| <span class="kn">from</span> <span class="nn">past.builtins</span> <span class="k">import</span> <span class="n">unicode</span> |
| <span class="kn">from</span> <span class="nn">six.moves</span> <span class="k">import</span> <span class="nb">zip</span> |
| |
| <span class="kn">import</span> <span class="nn">airflow.security.utils</span> <span class="k">as</span> <span class="nn">utils</span> |
| <span class="kn">from</span> <span class="nn">airflow</span> <span class="k">import</span> <span class="n">configuration</span> |
| <span class="kn">from</span> <span class="nn">airflow.exceptions</span> <span class="k">import</span> <span class="n">AirflowException</span> |
| <span class="kn">from</span> <span class="nn">airflow.hooks.base_hook</span> <span class="k">import</span> <span class="n">BaseHook</span> |
| <span class="kn">from</span> <span class="nn">airflow.utils.file</span> <span class="k">import</span> <span class="n">TemporaryDirectory</span> |
| <span class="kn">from</span> <span class="nn">airflow.utils.helpers</span> <span class="k">import</span> <span class="n">as_flattened_list</span> |
| <span class="kn">from</span> <span class="nn">airflow.utils.operator_helpers</span> <span class="k">import</span> <span class="n">AIRFLOW_VAR_NAME_FORMAT_MAPPING</span> |
| |
| <div class="viewcode-block" id="HIVE_QUEUE_PRIORITIES"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HIVE_QUEUE_PRIORITIES">[docs]</a><span class="n">HIVE_QUEUE_PRIORITIES</span> <span class="o">=</span> <span class="p">[</span><span class="s1">'VERY_HIGH'</span><span class="p">,</span> <span class="s1">'HIGH'</span><span class="p">,</span> <span class="s1">'NORMAL'</span><span class="p">,</span> <span class="s1">'LOW'</span><span class="p">,</span> <span class="s1">'VERY_LOW'</span><span class="p">]</span></div> |
| |
| |
| <div class="viewcode-block" id="get_context_from_env_var"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.get_context_from_env_var">[docs]</a><span class="k">def</span> <span class="nf">get_context_from_env_var</span><span class="p">():</span> |
| <span class="sd">"""</span> |
| <span class="sd"> Extract context from env variable, e.g. dag_id, task_id and execution_date,</span> |
| <span class="sd"> so that they can be used inside BashOperator and PythonOperator.</span> |
| <span class="sd"> :return: The context of interest.</span> |
| <span class="sd"> """</span> |
| <span class="k">return</span> <span class="p">{</span><span class="n">format_map</span><span class="p">[</span><span class="s1">'default'</span><span class="p">]:</span> <span class="n">os</span><span class="o">.</span><span class="n">environ</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="n">format_map</span><span class="p">[</span><span class="s1">'env_var_format'</span><span class="p">],</span> <span class="s1">''</span><span class="p">)</span> |
| <span class="k">for</span> <span class="n">format_map</span> <span class="ow">in</span> <span class="n">AIRFLOW_VAR_NAME_FORMAT_MAPPING</span><span class="o">.</span><span class="n">values</span><span class="p">()}</span></div> |
| |
| |
| <div class="viewcode-block" id="HiveCliHook"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveCliHook">[docs]</a><span class="k">class</span> <span class="nc">HiveCliHook</span><span class="p">(</span><span class="n">BaseHook</span><span class="p">):</span> |
| <span class="sd">"""Simple wrapper around the hive CLI.</span> |
| |
| <span class="sd"> It also supports the ``beeline``</span> |
| <span class="sd"> a lighter CLI that runs JDBC and is replacing the heavier</span> |
| <span class="sd"> traditional CLI. To enable ``beeline``, set the use_beeline param in the</span> |
| <span class="sd"> extra field of your connection as in ``{ "use_beeline": true }``</span> |
| |
| <span class="sd"> Note that you can also set default hive CLI parameters using the</span> |
| <span class="sd"> ``hive_cli_params`` to be used in your connection as in</span> |
| <span class="sd"> ``{"hive_cli_params": "-hiveconf mapred.job.tracker=some.jobtracker:444"}``</span> |
| <span class="sd"> Parameters passed here can be overridden by run_cli's hive_conf param</span> |
| |
| <span class="sd"> The extra connection parameter ``auth`` gets passed as in the ``jdbc``</span> |
| <span class="sd"> connection string as is.</span> |
| |
| <span class="sd"> :param mapred_queue: queue used by the Hadoop Scheduler (Capacity or Fair)</span> |
| <span class="sd"> :type mapred_queue: str</span> |
| <span class="sd"> :param mapred_queue_priority: priority within the job queue.</span> |
| <span class="sd"> Possible settings include: VERY_HIGH, HIGH, NORMAL, LOW, VERY_LOW</span> |
| <span class="sd"> :type mapred_queue_priority: str</span> |
| <span class="sd"> :param mapred_job_name: This name will appear in the jobtracker.</span> |
| <span class="sd"> This can make monitoring easier.</span> |
| <span class="sd"> :type mapred_job_name: str</span> |
| <span class="sd"> """</span> |
| |
| <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span> |
| <span class="bp">self</span><span class="p">,</span> |
| <span class="n">hive_cli_conn_id</span><span class="o">=</span><span class="s2">"hive_cli_default"</span><span class="p">,</span> |
| <span class="n">run_as</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> |
| <span class="n">mapred_queue</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> |
| <span class="n">mapred_queue_priority</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> |
| <span class="n">mapred_job_name</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span> |
| <span class="n">conn</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_connection</span><span class="p">(</span><span class="n">hive_cli_conn_id</span><span class="p">)</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">hive_cli_params</span> <span class="o">=</span> <span class="n">conn</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'hive_cli_params'</span><span class="p">,</span> <span class="s1">''</span><span class="p">)</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">use_beeline</span> <span class="o">=</span> <span class="n">conn</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'use_beeline'</span><span class="p">,</span> <span class="kc">False</span><span class="p">)</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">auth</span> <span class="o">=</span> <span class="n">conn</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'auth'</span><span class="p">,</span> <span class="s1">'noSasl'</span><span class="p">)</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">conn</span> <span class="o">=</span> <span class="n">conn</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">run_as</span> <span class="o">=</span> <span class="n">run_as</span> |
| |
| <span class="k">if</span> <span class="n">mapred_queue_priority</span><span class="p">:</span> |
| <span class="n">mapred_queue_priority</span> <span class="o">=</span> <span class="n">mapred_queue_priority</span><span class="o">.</span><span class="n">upper</span><span class="p">()</span> |
| <span class="k">if</span> <span class="n">mapred_queue_priority</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">HIVE_QUEUE_PRIORITIES</span><span class="p">:</span> |
| <span class="k">raise</span> <span class="n">AirflowException</span><span class="p">(</span> |
| <span class="s2">"Invalid Mapred Queue Priority. Valid values are: "</span> |
| <span class="s2">"</span><span class="si">{}</span><span class="s2">"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="s1">', '</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">HIVE_QUEUE_PRIORITIES</span><span class="p">)))</span> |
| |
| <span class="bp">self</span><span class="o">.</span><span class="n">mapred_queue</span> <span class="o">=</span> <span class="n">mapred_queue</span> <span class="ow">or</span> <span class="n">configuration</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'hive'</span><span class="p">,</span> |
| <span class="s1">'default_hive_mapred_queue'</span><span class="p">)</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">mapred_queue_priority</span> <span class="o">=</span> <span class="n">mapred_queue_priority</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">mapred_job_name</span> <span class="o">=</span> <span class="n">mapred_job_name</span> |
| |
| <div class="viewcode-block" id="HiveCliHook._prepare_cli_cmd"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveCliHook._prepare_cli_cmd">[docs]</a> <span class="k">def</span> <span class="nf">_prepare_cli_cmd</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span> |
| <span class="sd">"""</span> |
| <span class="sd"> This function creates the command list from available information</span> |
| <span class="sd"> """</span> |
| <span class="n">conn</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">conn</span> |
| <span class="n">hive_bin</span> <span class="o">=</span> <span class="s1">'hive'</span> |
| <span class="n">cmd_extra</span> <span class="o">=</span> <span class="p">[]</span> |
| |
| <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">use_beeline</span><span class="p">:</span> |
| <span class="n">hive_bin</span> <span class="o">=</span> <span class="s1">'beeline'</span> |
| <span class="n">jdbc_url</span> <span class="o">=</span> <span class="s2">"jdbc:hive2://</span><span class="si">{conn.host}</span><span class="s2">:</span><span class="si">{conn.port}</span><span class="s2">/</span><span class="si">{conn.schema}</span><span class="s2">"</span> |
| <span class="k">if</span> <span class="n">configuration</span><span class="o">.</span><span class="n">conf</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'core'</span><span class="p">,</span> <span class="s1">'security'</span><span class="p">)</span> <span class="o">==</span> <span class="s1">'kerberos'</span><span class="p">:</span> |
| <span class="n">template</span> <span class="o">=</span> <span class="n">conn</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span> |
| <span class="s1">'principal'</span><span class="p">,</span> <span class="s2">"hive/_HOST@EXAMPLE.COM"</span><span class="p">)</span> |
| <span class="k">if</span> <span class="s2">"_HOST"</span> <span class="ow">in</span> <span class="n">template</span><span class="p">:</span> |
| <span class="n">template</span> <span class="o">=</span> <span class="n">utils</span><span class="o">.</span><span class="n">replace_hostname_pattern</span><span class="p">(</span> |
| <span class="n">utils</span><span class="o">.</span><span class="n">get_components</span><span class="p">(</span><span class="n">template</span><span class="p">))</span> |
| |
| <span class="n">proxy_user</span> <span class="o">=</span> <span class="s2">""</span> <span class="c1"># noqa</span> |
| <span class="k">if</span> <span class="n">conn</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'proxy_user'</span><span class="p">)</span> <span class="o">==</span> <span class="s2">"login"</span> <span class="ow">and</span> <span class="n">conn</span><span class="o">.</span><span class="n">login</span><span class="p">:</span> |
| <span class="n">proxy_user</span> <span class="o">=</span> <span class="s2">"hive.server2.proxy.user=</span><span class="si">{0}</span><span class="s2">"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">conn</span><span class="o">.</span><span class="n">login</span><span class="p">)</span> |
| <span class="k">elif</span> <span class="n">conn</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'proxy_user'</span><span class="p">)</span> <span class="o">==</span> <span class="s2">"owner"</span> <span class="ow">and</span> <span class="bp">self</span><span class="o">.</span><span class="n">run_as</span><span class="p">:</span> |
| <span class="n">proxy_user</span> <span class="o">=</span> <span class="s2">"hive.server2.proxy.user=</span><span class="si">{0}</span><span class="s2">"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">run_as</span><span class="p">)</span> |
| |
| <span class="n">jdbc_url</span> <span class="o">+=</span> <span class="s2">";principal=</span><span class="si">{template}</span><span class="s2">;</span><span class="si">{proxy_user}</span><span class="s2">"</span> |
| <span class="k">elif</span> <span class="bp">self</span><span class="o">.</span><span class="n">auth</span><span class="p">:</span> |
| <span class="n">jdbc_url</span> <span class="o">+=</span> <span class="s2">";auth="</span> <span class="o">+</span> <span class="bp">self</span><span class="o">.</span><span class="n">auth</span> |
| |
| <span class="n">jdbc_url</span> <span class="o">=</span> <span class="n">jdbc_url</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="o">**</span><span class="nb">locals</span><span class="p">())</span> |
| <span class="n">jdbc_url</span> <span class="o">=</span> <span class="s1">'"</span><span class="si">{}</span><span class="s1">"'</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">jdbc_url</span><span class="p">)</span> |
| |
| <span class="n">cmd_extra</span> <span class="o">+=</span> <span class="p">[</span><span class="s1">'-u'</span><span class="p">,</span> <span class="n">jdbc_url</span><span class="p">]</span> |
| <span class="k">if</span> <span class="n">conn</span><span class="o">.</span><span class="n">login</span><span class="p">:</span> |
| <span class="n">cmd_extra</span> <span class="o">+=</span> <span class="p">[</span><span class="s1">'-n'</span><span class="p">,</span> <span class="n">conn</span><span class="o">.</span><span class="n">login</span><span class="p">]</span> |
| <span class="k">if</span> <span class="n">conn</span><span class="o">.</span><span class="n">password</span><span class="p">:</span> |
| <span class="n">cmd_extra</span> <span class="o">+=</span> <span class="p">[</span><span class="s1">'-p'</span><span class="p">,</span> <span class="n">conn</span><span class="o">.</span><span class="n">password</span><span class="p">]</span> |
| |
| <span class="n">hive_params_list</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">hive_cli_params</span><span class="o">.</span><span class="n">split</span><span class="p">()</span> |
| |
| <span class="k">return</span> <span class="p">[</span><span class="n">hive_bin</span><span class="p">]</span> <span class="o">+</span> <span class="n">cmd_extra</span> <span class="o">+</span> <span class="n">hive_params_list</span></div> |
| |
| <span class="nd">@staticmethod</span> |
| <div class="viewcode-block" id="HiveCliHook._prepare_hiveconf"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveCliHook._prepare_hiveconf">[docs]</a> <span class="k">def</span> <span class="nf">_prepare_hiveconf</span><span class="p">(</span><span class="n">d</span><span class="p">):</span> |
| <span class="sd">"""</span> |
| <span class="sd"> This function prepares a list of hiveconf params</span> |
| <span class="sd"> from a dictionary of key value pairs.</span> |
| |
| <span class="sd"> :param d:</span> |
| <span class="sd"> :type d: dict</span> |
| |
| <span class="sd"> >>> hh = HiveCliHook()</span> |
| <span class="sd"> >>> hive_conf = {"hive.exec.dynamic.partition": "true",</span> |
| <span class="sd"> ... "hive.exec.dynamic.partition.mode": "nonstrict"}</span> |
| <span class="sd"> >>> hh._prepare_hiveconf(hive_conf)</span> |
| <span class="sd"> ["-hiveconf", "hive.exec.dynamic.partition=true",\</span> |
| <span class="sd"> "-hiveconf", "hive.exec.dynamic.partition.mode=nonstrict"]</span> |
| <span class="sd"> """</span> |
| <span class="k">if</span> <span class="ow">not</span> <span class="n">d</span><span class="p">:</span> |
| <span class="k">return</span> <span class="p">[]</span> |
| <span class="k">return</span> <span class="n">as_flattened_list</span><span class="p">(</span> |
| <span class="nb">zip</span><span class="p">([</span><span class="s2">"-hiveconf"</span><span class="p">]</span> <span class="o">*</span> <span class="nb">len</span><span class="p">(</span><span class="n">d</span><span class="p">),</span> |
| <span class="p">[</span><span class="s2">"</span><span class="si">{}</span><span class="s2">=</span><span class="si">{}</span><span class="s2">"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">k</span><span class="p">,</span> <span class="n">v</span><span class="p">)</span> <span class="k">for</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span> <span class="ow">in</span> <span class="n">d</span><span class="o">.</span><span class="n">items</span><span class="p">()])</span></div> |
| <span class="p">)</span> |
| |
| <div class="viewcode-block" id="HiveCliHook.run_cli"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveCliHook.run_cli">[docs]</a> <span class="k">def</span> <span class="nf">run_cli</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">hql</span><span class="p">,</span> <span class="n">schema</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">verbose</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span> <span class="n">hive_conf</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span> |
| <span class="sd">"""</span> |
| <span class="sd"> Run an hql statement using the hive cli. If hive_conf is specified</span> |
| <span class="sd"> it should be a dict and the entries will be set as key/value pairs</span> |
| <span class="sd"> in HiveConf</span> |
| |
| |
| <span class="sd"> :param hive_conf: if specified these key value pairs will be passed</span> |
| <span class="sd"> to hive as ``-hiveconf "key"="value"``. Note that they will be</span> |
| <span class="sd"> passed after the ``hive_cli_params`` and thus will override</span> |
| <span class="sd"> whatever values are specified in the database.</span> |
| <span class="sd"> :type hive_conf: dict</span> |
| |
| <span class="sd"> >>> hh = HiveCliHook()</span> |
| <span class="sd"> >>> result = hh.run_cli("USE airflow;")</span> |
| <span class="sd"> >>> ("OK" in result)</span> |
| <span class="sd"> True</span> |
| <span class="sd"> """</span> |
| <span class="n">conn</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">conn</span> |
| <span class="n">schema</span> <span class="o">=</span> <span class="n">schema</span> <span class="ow">or</span> <span class="n">conn</span><span class="o">.</span><span class="n">schema</span> |
| <span class="k">if</span> <span class="n">schema</span><span class="p">:</span> |
| <span class="n">hql</span> <span class="o">=</span> <span class="s2">"USE </span><span class="si">{schema}</span><span class="s2">;</span><span class="se">\n</span><span class="si">{hql}</span><span class="s2">"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="o">**</span><span class="nb">locals</span><span class="p">())</span> |
| |
| <span class="k">with</span> <span class="n">TemporaryDirectory</span><span class="p">(</span><span class="n">prefix</span><span class="o">=</span><span class="s1">'airflow_hiveop_'</span><span class="p">)</span> <span class="k">as</span> <span class="n">tmp_dir</span><span class="p">:</span> |
| <span class="k">with</span> <span class="n">NamedTemporaryFile</span><span class="p">(</span><span class="nb">dir</span><span class="o">=</span><span class="n">tmp_dir</span><span class="p">)</span> <span class="k">as</span> <span class="n">f</span><span class="p">:</span> |
| <span class="n">hql</span> <span class="o">=</span> <span class="n">hql</span> <span class="o">+</span> <span class="s1">'</span><span class="se">\n</span><span class="s1">'</span> |
| <span class="n">f</span><span class="o">.</span><span class="n">write</span><span class="p">(</span><span class="n">hql</span><span class="o">.</span><span class="n">encode</span><span class="p">(</span><span class="s1">'UTF-8'</span><span class="p">))</span> |
| <span class="n">f</span><span class="o">.</span><span class="n">flush</span><span class="p">()</span> |
| <span class="n">hive_cmd</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_prepare_cli_cmd</span><span class="p">()</span> |
| <span class="n">env_context</span> <span class="o">=</span> <span class="n">get_context_from_env_var</span><span class="p">()</span> |
| <span class="c1"># Only extend the hive_conf if it is defined.</span> |
| <span class="k">if</span> <span class="n">hive_conf</span><span class="p">:</span> |
| <span class="n">env_context</span><span class="o">.</span><span class="n">update</span><span class="p">(</span><span class="n">hive_conf</span><span class="p">)</span> |
| <span class="n">hive_conf_params</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_prepare_hiveconf</span><span class="p">(</span><span class="n">env_context</span><span class="p">)</span> |
| <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">mapred_queue</span><span class="p">:</span> |
| <span class="n">hive_conf_params</span><span class="o">.</span><span class="n">extend</span><span class="p">(</span> |
| <span class="p">[</span><span class="s1">'-hiveconf'</span><span class="p">,</span> |
| <span class="s1">'mapreduce.job.queuename=</span><span class="si">{}</span><span class="s1">'</span> |
| <span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">mapred_queue</span><span class="p">),</span> |
| <span class="s1">'-hiveconf'</span><span class="p">,</span> |
| <span class="s1">'mapred.job.queue.name=</span><span class="si">{}</span><span class="s1">'</span> |
| <span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">mapred_queue</span><span class="p">),</span> |
| <span class="s1">'-hiveconf'</span><span class="p">,</span> |
| <span class="s1">'tez.job.queue.name=</span><span class="si">{}</span><span class="s1">'</span> |
| <span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">mapred_queue</span><span class="p">)</span> |
| <span class="p">])</span> |
| |
| <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">mapred_queue_priority</span><span class="p">:</span> |
| <span class="n">hive_conf_params</span><span class="o">.</span><span class="n">extend</span><span class="p">(</span> |
| <span class="p">[</span><span class="s1">'-hiveconf'</span><span class="p">,</span> |
| <span class="s1">'mapreduce.job.priority=</span><span class="si">{}</span><span class="s1">'</span> |
| <span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">mapred_queue_priority</span><span class="p">)])</span> |
| |
| <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">mapred_job_name</span><span class="p">:</span> |
| <span class="n">hive_conf_params</span><span class="o">.</span><span class="n">extend</span><span class="p">(</span> |
| <span class="p">[</span><span class="s1">'-hiveconf'</span><span class="p">,</span> |
| <span class="s1">'mapred.job.name=</span><span class="si">{}</span><span class="s1">'</span> |
| <span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">mapred_job_name</span><span class="p">)])</span> |
| |
| <span class="n">hive_cmd</span><span class="o">.</span><span class="n">extend</span><span class="p">(</span><span class="n">hive_conf_params</span><span class="p">)</span> |
| <span class="n">hive_cmd</span><span class="o">.</span><span class="n">extend</span><span class="p">([</span><span class="s1">'-f'</span><span class="p">,</span> <span class="n">f</span><span class="o">.</span><span class="n">name</span><span class="p">])</span> |
| |
| <span class="k">if</span> <span class="n">verbose</span><span class="p">:</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">"</span><span class="si">%s</span><span class="s2">"</span><span class="p">,</span> <span class="s2">" "</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">hive_cmd</span><span class="p">))</span> |
| <span class="n">sp</span> <span class="o">=</span> <span class="n">subprocess</span><span class="o">.</span><span class="n">Popen</span><span class="p">(</span> |
| <span class="n">hive_cmd</span><span class="p">,</span> |
| <span class="n">stdout</span><span class="o">=</span><span class="n">subprocess</span><span class="o">.</span><span class="n">PIPE</span><span class="p">,</span> |
| <span class="n">stderr</span><span class="o">=</span><span class="n">subprocess</span><span class="o">.</span><span class="n">STDOUT</span><span class="p">,</span> |
| <span class="n">cwd</span><span class="o">=</span><span class="n">tmp_dir</span><span class="p">,</span> |
| <span class="n">close_fds</span><span class="o">=</span><span class="kc">True</span><span class="p">)</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">sp</span> <span class="o">=</span> <span class="n">sp</span> |
| <span class="n">stdout</span> <span class="o">=</span> <span class="s1">''</span> |
| <span class="k">while</span> <span class="kc">True</span><span class="p">:</span> |
| <span class="n">line</span> <span class="o">=</span> <span class="n">sp</span><span class="o">.</span><span class="n">stdout</span><span class="o">.</span><span class="n">readline</span><span class="p">()</span> |
| <span class="k">if</span> <span class="ow">not</span> <span class="n">line</span><span class="p">:</span> |
| <span class="k">break</span> |
| <span class="n">stdout</span> <span class="o">+=</span> <span class="n">line</span><span class="o">.</span><span class="n">decode</span><span class="p">(</span><span class="s1">'UTF-8'</span><span class="p">)</span> |
| <span class="k">if</span> <span class="n">verbose</span><span class="p">:</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="n">line</span><span class="o">.</span><span class="n">decode</span><span class="p">(</span><span class="s1">'UTF-8'</span><span class="p">)</span><span class="o">.</span><span class="n">strip</span><span class="p">())</span> |
| <span class="n">sp</span><span class="o">.</span><span class="n">wait</span><span class="p">()</span> |
| |
| <span class="k">if</span> <span class="n">sp</span><span class="o">.</span><span class="n">returncode</span><span class="p">:</span> |
| <span class="k">raise</span> <span class="n">AirflowException</span><span class="p">(</span><span class="n">stdout</span><span class="p">)</span> |
| |
| <span class="k">return</span> <span class="n">stdout</span></div> |
| |
<!-- Auto-generated Sphinx "viewcode" block (Pygments-highlighted source of
     HiveCliHook.test_hql). NOTE(review): this file is a documentation build
     artifact; do not hand-edit the span markup below, as it will be
     overwritten by the next docs build and must mirror the Python source. -->
<div class="viewcode-block" id="HiveCliHook.test_hql"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveCliHook.test_hql">[docs]</a> <span class="k">def</span> <span class="nf">test_hql</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">hql</span><span class="p">):</span>
<span class="sd">"""</span>
<span class="sd"> Test an hql statement using the hive cli and EXPLAIN</span>

<span class="sd"> """</span>
<span class="n">create</span><span class="p">,</span> <span class="n">insert</span><span class="p">,</span> <span class="n">other</span> <span class="o">=</span> <span class="p">[],</span> <span class="p">[],</span> <span class="p">[]</span>
<span class="k">for</span> <span class="n">query</span> <span class="ow">in</span> <span class="n">hql</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="s1">';'</span><span class="p">):</span> <span class="c1"># naive</span>
<span class="n">query_original</span> <span class="o">=</span> <span class="n">query</span>
<span class="n">query</span> <span class="o">=</span> <span class="n">query</span><span class="o">.</span><span class="n">lower</span><span class="p">()</span><span class="o">.</span><span class="n">strip</span><span class="p">()</span>

<span class="k">if</span> <span class="n">query</span><span class="o">.</span><span class="n">startswith</span><span class="p">(</span><span class="s1">'create table'</span><span class="p">):</span>
<span class="n">create</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">query_original</span><span class="p">)</span>
<span class="k">elif</span> <span class="n">query</span><span class="o">.</span><span class="n">startswith</span><span class="p">((</span><span class="s1">'set '</span><span class="p">,</span>
<span class="s1">'add jar '</span><span class="p">,</span>
<span class="s1">'create temporary function'</span><span class="p">)):</span>
<span class="n">other</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">query_original</span><span class="p">)</span>
<span class="k">elif</span> <span class="n">query</span><span class="o">.</span><span class="n">startswith</span><span class="p">(</span><span class="s1">'insert'</span><span class="p">):</span>
<span class="n">insert</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">query_original</span><span class="p">)</span>
<span class="n">other</span> <span class="o">=</span> <span class="s1">';'</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">other</span><span class="p">)</span>
<span class="k">for</span> <span class="n">query_set</span> <span class="ow">in</span> <span class="p">[</span><span class="n">create</span><span class="p">,</span> <span class="n">insert</span><span class="p">]:</span>
<span class="k">for</span> <span class="n">query</span> <span class="ow">in</span> <span class="n">query_set</span><span class="p">:</span>

<span class="n">query_preview</span> <span class="o">=</span> <span class="s1">' '</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">query</span><span class="o">.</span><span class="n">split</span><span class="p">())[:</span><span class="mi">50</span><span class="p">]</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">"Testing HQL [</span><span class="si">%s</span><span class="s2"> (...)]"</span><span class="p">,</span> <span class="n">query_preview</span><span class="p">)</span>
<span class="k">if</span> <span class="n">query_set</span> <span class="o">==</span> <span class="n">insert</span><span class="p">:</span>
<span class="n">query</span> <span class="o">=</span> <span class="n">other</span> <span class="o">+</span> <span class="s1">'; explain '</span> <span class="o">+</span> <span class="n">query</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">query</span> <span class="o">=</span> <span class="s1">'explain '</span> <span class="o">+</span> <span class="n">query</span>
<span class="k">try</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">run_cli</span><span class="p">(</span><span class="n">query</span><span class="p">,</span> <span class="n">verbose</span><span class="o">=</span><span class="kc">False</span><span class="p">)</span>
<span class="k">except</span> <span class="n">AirflowException</span> <span class="k">as</span> <span class="n">e</span><span class="p">:</span>
<span class="n">message</span> <span class="o">=</span> <span class="n">e</span><span class="o">.</span><span class="n">args</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="s1">'</span><span class="se">\n</span><span class="s1">'</span><span class="p">)[</span><span class="o">-</span><span class="mi">2</span><span class="p">]</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="n">message</span><span class="p">)</span>
<span class="n">error_loc</span> <span class="o">=</span> <span class="n">re</span><span class="o">.</span><span class="n">search</span><span class="p">(</span><span class="sa">r</span><span class="s1">'(\d+):(\d+)'</span><span class="p">,</span> <span class="n">message</span><span class="p">)</span>
<span class="k">if</span> <span class="n">error_loc</span> <span class="ow">and</span> <span class="n">error_loc</span><span class="o">.</span><span class="n">group</span><span class="p">(</span><span class="mi">1</span><span class="p">)</span><span class="o">.</span><span class="n">isdigit</span><span class="p">():</span>
<span class="n">lst</span> <span class="o">=</span> <span class="nb">int</span><span class="p">(</span><span class="n">error_loc</span><span class="o">.</span><span class="n">group</span><span class="p">(</span><span class="mi">1</span><span class="p">))</span>
<span class="n">begin</span> <span class="o">=</span> <span class="nb">max</span><span class="p">(</span><span class="n">lst</span> <span class="o">-</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">0</span><span class="p">)</span>
<span class="n">end</span> <span class="o">=</span> <span class="nb">min</span><span class="p">(</span><span class="n">lst</span> <span class="o">+</span> <span class="mi">3</span><span class="p">,</span> <span class="nb">len</span><span class="p">(</span><span class="n">query</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="s1">'</span><span class="se">\n</span><span class="s1">'</span><span class="p">)))</span>
<span class="n">context</span> <span class="o">=</span> <span class="s1">'</span><span class="se">\n</span><span class="s1">'</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">query</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="s1">'</span><span class="se">\n</span><span class="s1">'</span><span class="p">)[</span><span class="n">begin</span><span class="p">:</span><span class="n">end</span><span class="p">])</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">"Context :</span><span class="se">\n</span><span class="s2"> </span><span class="si">%s</span><span class="s2">"</span><span class="p">,</span> <span class="n">context</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">"SUCCESS"</span><span class="p">)</span></div>
| |
<!-- Auto-generated Sphinx "viewcode" block (Pygments-highlighted source of
     HiveCliHook.load_df). NOTE(review): documentation build artifact; the
     span markup below must stay byte-identical to the rendered Python source
     and will be regenerated on the next docs build. -->
<div class="viewcode-block" id="HiveCliHook.load_df"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveCliHook.load_df">[docs]</a> <span class="k">def</span> <span class="nf">load_df</span><span class="p">(</span>
<span class="bp">self</span><span class="p">,</span>
<span class="n">df</span><span class="p">,</span>
<span class="n">table</span><span class="p">,</span>
<span class="n">field_dict</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">delimiter</span><span class="o">=</span><span class="s1">','</span><span class="p">,</span>
<span class="n">encoding</span><span class="o">=</span><span class="s1">'utf8'</span><span class="p">,</span>
<span class="n">pandas_kwargs</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="sd">"""</span>
<span class="sd"> Loads a pandas DataFrame into hive.</span>

<span class="sd"> Hive data types will be inferred if not passed but column names will</span>
<span class="sd"> not be sanitized.</span>

<span class="sd"> :param df: DataFrame to load into a Hive table</span>
<span class="sd"> :type df: pandas.DataFrame</span>
<span class="sd"> :param table: target Hive table, use dot notation to target a</span>
<span class="sd"> specific database</span>
<span class="sd"> :type table: str</span>
<span class="sd"> :param field_dict: mapping from column name to hive data type.</span>
<span class="sd"> Note that it must be OrderedDict so as to keep columns' order.</span>
<span class="sd"> :type field_dict: collections.OrderedDict</span>
<span class="sd"> :param delimiter: field delimiter in the file</span>
<span class="sd"> :type delimiter: str</span>
<span class="sd"> :param encoding: str encoding to use when writing DataFrame to file</span>
<span class="sd"> :type encoding: str</span>
<span class="sd"> :param pandas_kwargs: passed to DataFrame.to_csv</span>
<span class="sd"> :type pandas_kwargs: dict</span>
<span class="sd"> :param kwargs: passed to self.load_file</span>
<span class="sd"> """</span>

<span class="k">def</span> <span class="nf">_infer_field_types_from_df</span><span class="p">(</span><span class="n">df</span><span class="p">):</span>
<span class="n">DTYPE_KIND_HIVE_TYPE</span> <span class="o">=</span> <span class="p">{</span>
<span class="s1">'b'</span><span class="p">:</span> <span class="s1">'BOOLEAN'</span><span class="p">,</span> <span class="c1"># boolean</span>
<span class="s1">'i'</span><span class="p">:</span> <span class="s1">'BIGINT'</span><span class="p">,</span> <span class="c1"># signed integer</span>
<span class="s1">'u'</span><span class="p">:</span> <span class="s1">'BIGINT'</span><span class="p">,</span> <span class="c1"># unsigned integer</span>
<span class="s1">'f'</span><span class="p">:</span> <span class="s1">'DOUBLE'</span><span class="p">,</span> <span class="c1"># floating-point</span>
<span class="s1">'c'</span><span class="p">:</span> <span class="s1">'STRING'</span><span class="p">,</span> <span class="c1"># complex floating-point</span>
<span class="s1">'M'</span><span class="p">:</span> <span class="s1">'TIMESTAMP'</span><span class="p">,</span> <span class="c1"># datetime</span>
<span class="s1">'O'</span><span class="p">:</span> <span class="s1">'STRING'</span><span class="p">,</span> <span class="c1"># object</span>
<span class="s1">'S'</span><span class="p">:</span> <span class="s1">'STRING'</span><span class="p">,</span> <span class="c1"># (byte-)string</span>
<span class="s1">'U'</span><span class="p">:</span> <span class="s1">'STRING'</span><span class="p">,</span> <span class="c1"># Unicode</span>
<span class="s1">'V'</span><span class="p">:</span> <span class="s1">'STRING'</span> <span class="c1"># void</span>
<span class="p">}</span>

<span class="n">d</span> <span class="o">=</span> <span class="n">OrderedDict</span><span class="p">()</span>
<span class="k">for</span> <span class="n">col</span><span class="p">,</span> <span class="n">dtype</span> <span class="ow">in</span> <span class="n">df</span><span class="o">.</span><span class="n">dtypes</span><span class="o">.</span><span class="n">iteritems</span><span class="p">():</span>
<span class="n">d</span><span class="p">[</span><span class="n">col</span><span class="p">]</span> <span class="o">=</span> <span class="n">DTYPE_KIND_HIVE_TYPE</span><span class="p">[</span><span class="n">dtype</span><span class="o">.</span><span class="n">kind</span><span class="p">]</span>
<span class="k">return</span> <span class="n">d</span>

<span class="k">if</span> <span class="n">pandas_kwargs</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">pandas_kwargs</span> <span class="o">=</span> <span class="p">{}</span>

<span class="k">with</span> <span class="n">TemporaryDirectory</span><span class="p">(</span><span class="n">prefix</span><span class="o">=</span><span class="s1">'airflow_hiveop_'</span><span class="p">)</span> <span class="k">as</span> <span class="n">tmp_dir</span><span class="p">:</span>
<span class="k">with</span> <span class="n">NamedTemporaryFile</span><span class="p">(</span><span class="nb">dir</span><span class="o">=</span><span class="n">tmp_dir</span><span class="p">,</span> <span class="n">mode</span><span class="o">=</span><span class="s2">"w"</span><span class="p">)</span> <span class="k">as</span> <span class="n">f</span><span class="p">:</span>

<span class="k">if</span> <span class="n">field_dict</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">field_dict</span> <span class="o">=</span> <span class="n">_infer_field_types_from_df</span><span class="p">(</span><span class="n">df</span><span class="p">)</span>

<span class="n">df</span><span class="o">.</span><span class="n">to_csv</span><span class="p">(</span><span class="n">path_or_buf</span><span class="o">=</span><span class="n">f</span><span class="p">,</span>
<span class="n">sep</span><span class="o">=</span><span class="p">(</span><span class="n">delimiter</span><span class="o">.</span><span class="n">encode</span><span class="p">(</span><span class="n">encoding</span><span class="p">)</span>
<span class="k">if</span> <span class="n">six</span><span class="o">.</span><span class="n">PY2</span> <span class="ow">and</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">delimiter</span><span class="p">,</span> <span class="n">unicode</span><span class="p">)</span>
<span class="k">else</span> <span class="n">delimiter</span><span class="p">),</span>
<span class="n">header</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span>
<span class="n">index</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span>
<span class="n">encoding</span><span class="o">=</span><span class="n">encoding</span><span class="p">,</span>
<span class="n">date_format</span><span class="o">=</span><span class="s2">"%Y-%m-</span><span class="si">%d</span><span class="s2"> %H:%M:%S"</span><span class="p">,</span>
<span class="o">**</span><span class="n">pandas_kwargs</span><span class="p">)</span>
<span class="n">f</span><span class="o">.</span><span class="n">flush</span><span class="p">()</span>

<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">load_file</span><span class="p">(</span><span class="n">filepath</span><span class="o">=</span><span class="n">f</span><span class="o">.</span><span class="n">name</span><span class="p">,</span>
<span class="n">table</span><span class="o">=</span><span class="n">table</span><span class="p">,</span>
<span class="n">delimiter</span><span class="o">=</span><span class="n">delimiter</span><span class="p">,</span>
<span class="n">field_dict</span><span class="o">=</span><span class="n">field_dict</span><span class="p">,</span>
<span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
| |
| <div class="viewcode-block" id="HiveCliHook.load_file"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveCliHook.load_file">[docs]</a> <span class="k">def</span> <span class="nf">load_file</span><span class="p">(</span> |
| <span class="bp">self</span><span class="p">,</span> |
| <span class="n">filepath</span><span class="p">,</span> |
| <span class="n">table</span><span class="p">,</span> |
| <span class="n">delimiter</span><span class="o">=</span><span class="s2">","</span><span class="p">,</span> |
| <span class="n">field_dict</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> |
| <span class="n">create</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span> |
| <span class="n">overwrite</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span> |
| <span class="n">partition</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> |
| <span class="n">recreate</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> |
| <span class="n">tblproperties</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span> |
| <span class="sd">"""</span> |
| <span class="sd"> Loads a local file into Hive</span> |
| |
| <span class="sd"> Note that the table generated in Hive uses ``STORED AS textfile``</span> |
| <span class="sd"> which isn't the most efficient serialization format. If a</span> |
| <span class="sd"> large amount of data is loaded and/or if the tables gets</span> |
| <span class="sd"> queried considerably, you may want to use this operator only to</span> |
| <span class="sd"> stage the data into a temporary table before loading it into its</span> |
| <span class="sd"> final destination using a ``HiveOperator``.</span> |
| |
| <span class="sd"> :param filepath: local filepath of the file to load</span> |
| <span class="sd"> :type filepath: str</span> |
| <span class="sd"> :param table: target Hive table, use dot notation to target a</span> |
| <span class="sd"> specific database</span> |
| <span class="sd"> :type table: str</span> |
| <span class="sd"> :param delimiter: field delimiter in the file</span> |
| <span class="sd"> :type delimiter: str</span> |
| <span class="sd"> :param field_dict: A dictionary of the fields name in the file</span> |
| <span class="sd"> as keys and their Hive types as values.</span> |
| <span class="sd"> Note that it must be OrderedDict so as to keep columns' order.</span> |
| <span class="sd"> :type field_dict: collections.OrderedDict</span> |
| <span class="sd"> :param create: whether to create the table if it doesn't exist</span> |
| <span class="sd"> :type create: bool</span> |
| <span class="sd"> :param overwrite: whether to overwrite the data in table or partition</span> |
| <span class="sd"> :type overwrite: bool</span> |
| <span class="sd"> :param partition: target partition as a dict of partition columns</span> |
| <span class="sd"> and values</span> |
| <span class="sd"> :type partition: dict</span> |
| <span class="sd"> :param recreate: whether to drop and recreate the table at every</span> |
| <span class="sd"> execution</span> |
| <span class="sd"> :type recreate: bool</span> |
| <span class="sd"> :param tblproperties: TBLPROPERTIES of the hive table being created</span> |
| <span class="sd"> :type tblproperties: dict</span> |
| <span class="sd"> """</span> |
| <span class="n">hql</span> <span class="o">=</span> <span class="s1">''</span> |
| <span class="k">if</span> <span class="n">recreate</span><span class="p">:</span> |
| <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"DROP TABLE IF EXISTS </span><span class="si">{table}</span><span class="s2">;</span><span class="se">\n</span><span class="s2">"</span> |
| <span class="k">if</span> <span class="n">create</span> <span class="ow">or</span> <span class="n">recreate</span><span class="p">:</span> |
| <span class="k">if</span> <span class="n">field_dict</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span> |
| <span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">"Must provide a field dict when creating a table"</span><span class="p">)</span> |
| <span class="n">fields</span> <span class="o">=</span> <span class="s2">",</span><span class="se">\n</span><span class="s2"> "</span><span class="o">.</span><span class="n">join</span><span class="p">(</span> |
| <span class="p">[</span><span class="n">k</span> <span class="o">+</span> <span class="s1">' '</span> <span class="o">+</span> <span class="n">v</span> <span class="k">for</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span> <span class="ow">in</span> <span class="n">field_dict</span><span class="o">.</span><span class="n">items</span><span class="p">()])</span> |
| <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"CREATE TABLE IF NOT EXISTS </span><span class="si">{table}</span><span class="s2"> (</span><span class="se">\n</span><span class="si">{fields}</span><span class="s2">)</span><span class="se">\n</span><span class="s2">"</span> |
| <span class="k">if</span> <span class="n">partition</span><span class="p">:</span> |
| <span class="n">pfields</span> <span class="o">=</span> <span class="s2">",</span><span class="se">\n</span><span class="s2"> "</span><span class="o">.</span><span class="n">join</span><span class="p">(</span> |
| <span class="p">[</span><span class="n">p</span> <span class="o">+</span> <span class="s2">" STRING"</span> <span class="k">for</span> <span class="n">p</span> <span class="ow">in</span> <span class="n">partition</span><span class="p">])</span> |
| <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"PARTITIONED BY (</span><span class="si">{pfields}</span><span class="s2">)</span><span class="se">\n</span><span class="s2">"</span> |
| <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"ROW FORMAT DELIMITED</span><span class="se">\n</span><span class="s2">"</span> |
| <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"FIELDS TERMINATED BY '</span><span class="si">{delimiter}</span><span class="s2">'</span><span class="se">\n</span><span class="s2">"</span> |
| <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"STORED AS textfile</span><span class="se">\n</span><span class="s2">"</span> |
| <span class="k">if</span> <span class="n">tblproperties</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span> |
| <span class="n">tprops</span> <span class="o">=</span> <span class="s2">", "</span><span class="o">.</span><span class="n">join</span><span class="p">(</span> |
| <span class="p">[</span><span class="s2">"'</span><span class="si">{0}</span><span class="s2">'='</span><span class="si">{1}</span><span class="s2">'"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">k</span><span class="p">,</span> <span class="n">v</span><span class="p">)</span> <span class="k">for</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span> <span class="ow">in</span> <span class="n">tblproperties</span><span class="o">.</span><span class="n">items</span><span class="p">()])</span> |
| <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"TBLPROPERTIES(</span><span class="si">{tprops}</span><span class="s2">)</span><span class="se">\n</span><span class="s2">"</span> |
| <span class="n">hql</span> <span class="o">+=</span> <span class="s2">";"</span> |
| <span class="n">hql</span> <span class="o">=</span> <span class="n">hql</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="o">**</span><span class="nb">locals</span><span class="p">())</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="n">hql</span><span class="p">)</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">run_cli</span><span class="p">(</span><span class="n">hql</span><span class="p">)</span> |
| <span class="n">hql</span> <span class="o">=</span> <span class="s2">"LOAD DATA LOCAL INPATH '</span><span class="si">{filepath}</span><span class="s2">' "</span> |
| <span class="k">if</span> <span class="n">overwrite</span><span class="p">:</span> |
| <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"OVERWRITE "</span> |
| <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"INTO TABLE </span><span class="si">{table}</span><span class="s2"> "</span> |
| <span class="k">if</span> <span class="n">partition</span><span class="p">:</span> |
| <span class="n">pvals</span> <span class="o">=</span> <span class="s2">", "</span><span class="o">.</span><span class="n">join</span><span class="p">(</span> |
| <span class="p">[</span><span class="s2">"</span><span class="si">{0}</span><span class="s2">='</span><span class="si">{1}</span><span class="s2">'"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">k</span><span class="p">,</span> <span class="n">v</span><span class="p">)</span> <span class="k">for</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span> <span class="ow">in</span> <span class="n">partition</span><span class="o">.</span><span class="n">items</span><span class="p">()])</span> |
| <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"PARTITION (</span><span class="si">{pvals}</span><span class="s2">)"</span> |
| |
| <span class="c1"># As a workaround for HIVE-10541, add a newline character</span> |
| <span class="c1"># at the end of hql (AIRFLOW-2412).</span> |
| <span class="n">hql</span> <span class="o">+=</span> <span class="s1">';</span><span class="se">\n</span><span class="s1">'</span> |
| |
| <span class="n">hql</span> <span class="o">=</span> <span class="n">hql</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="o">**</span><span class="nb">locals</span><span class="p">())</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="n">hql</span><span class="p">)</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">run_cli</span><span class="p">(</span><span class="n">hql</span><span class="p">)</span></div> |
| |
| <div class="viewcode-block" id="HiveCliHook.kill"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveCliHook.kill">[docs]</a> <span class="k">def</span> <span class="nf">kill</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span> |
| <span class="k">if</span> <span class="nb">hasattr</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="s1">'sp'</span><span class="p">):</span> |
| <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">sp</span><span class="o">.</span><span class="n">poll</span><span class="p">()</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span> |
| <span class="nb">print</span><span class="p">(</span><span class="s2">"Killing the Hive job"</span><span class="p">)</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">sp</span><span class="o">.</span><span class="n">terminate</span><span class="p">()</span> |
| <span class="n">time</span><span class="o">.</span><span class="n">sleep</span><span class="p">(</span><span class="mi">60</span><span class="p">)</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">sp</span><span class="o">.</span><span class="n">kill</span><span class="p">()</span></div></div> |
| |
| |
| <div class="viewcode-block" id="HiveMetastoreHook"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveMetastoreHook">[docs]</a><span class="k">class</span> <span class="nc">HiveMetastoreHook</span><span class="p">(</span><span class="n">BaseHook</span><span class="p">):</span> |
| <span class="sd">""" Wrapper to interact with the Hive Metastore"""</span> |
| |
| <span class="c1"># java short max val</span> |
| <div class="viewcode-block" id="HiveMetastoreHook.MAX_PART_COUNT"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveMetastoreHook.MAX_PART_COUNT">[docs]</a> <span class="n">MAX_PART_COUNT</span> <span class="o">=</span> <span class="mi">32767</span></div> |
| |
| <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">metastore_conn_id</span><span class="o">=</span><span class="s1">'metastore_default'</span><span class="p">):</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">metastore_conn</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_connection</span><span class="p">(</span><span class="n">metastore_conn_id</span><span class="p">)</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_metastore_client</span><span class="p">()</span> |
| |
| <div class="viewcode-block" id="HiveMetastoreHook.__getstate__"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveMetastoreHook.__getstate__">[docs]</a> <span class="k">def</span> <span class="nf">__getstate__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span> |
| <span class="c1"># This is for pickling to work despite the thrift hive client not</span>
| <span class="c1"># being picklable</span>
| <span class="n">d</span> <span class="o">=</span> <span class="nb">dict</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="vm">__dict__</span><span class="p">)</span> |
| <span class="k">del</span> <span class="n">d</span><span class="p">[</span><span class="s1">'metastore'</span><span class="p">]</span> |
| <span class="k">return</span> <span class="n">d</span></div> |
| |
| <div class="viewcode-block" id="HiveMetastoreHook.__setstate__"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveMetastoreHook.__setstate__">[docs]</a> <span class="k">def</span> <span class="nf">__setstate__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">d</span><span class="p">):</span> |
| <span class="bp">self</span><span class="o">.</span><span class="vm">__dict__</span><span class="o">.</span><span class="n">update</span><span class="p">(</span><span class="n">d</span><span class="p">)</span> |
| <span class="bp">self</span><span class="o">.</span><span class="vm">__dict__</span><span class="p">[</span><span class="s1">'metastore'</span><span class="p">]</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_metastore_client</span><span class="p">()</span></div> |
| |
| <div class="viewcode-block" id="HiveMetastoreHook.get_metastore_client"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveMetastoreHook.get_metastore_client">[docs]</a> <span class="k">def</span> <span class="nf">get_metastore_client</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span> |
| <span class="sd">"""</span> |
| <span class="sd"> Returns a Hive thrift client.</span> |
| <span class="sd"> """</span> |
| <span class="kn">import</span> <span class="nn">hmsclient</span> |
| <span class="kn">from</span> <span class="nn">thrift.transport</span> <span class="k">import</span> <span class="n">TSocket</span><span class="p">,</span> <span class="n">TTransport</span> |
| <span class="kn">from</span> <span class="nn">thrift.protocol</span> <span class="k">import</span> <span class="n">TBinaryProtocol</span> |
| <span class="n">ms</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore_conn</span> |
| <span class="n">auth_mechanism</span> <span class="o">=</span> <span class="n">ms</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'authMechanism'</span><span class="p">,</span> <span class="s1">'NOSASL'</span><span class="p">)</span> |
| <span class="k">if</span> <span class="n">configuration</span><span class="o">.</span><span class="n">conf</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'core'</span><span class="p">,</span> <span class="s1">'security'</span><span class="p">)</span> <span class="o">==</span> <span class="s1">'kerberos'</span><span class="p">:</span> |
| <span class="n">auth_mechanism</span> <span class="o">=</span> <span class="n">ms</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'authMechanism'</span><span class="p">,</span> <span class="s1">'GSSAPI'</span><span class="p">)</span> |
| <span class="n">kerberos_service_name</span> <span class="o">=</span> <span class="n">ms</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'kerberos_service_name'</span><span class="p">,</span> <span class="s1">'hive'</span><span class="p">)</span> |
| |
| <span class="n">socket</span> <span class="o">=</span> <span class="n">TSocket</span><span class="o">.</span><span class="n">TSocket</span><span class="p">(</span><span class="n">ms</span><span class="o">.</span><span class="n">host</span><span class="p">,</span> <span class="n">ms</span><span class="o">.</span><span class="n">port</span><span class="p">)</span> |
| <span class="k">if</span> <span class="n">configuration</span><span class="o">.</span><span class="n">conf</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'core'</span><span class="p">,</span> <span class="s1">'security'</span><span class="p">)</span> <span class="o">==</span> <span class="s1">'kerberos'</span> \ |
| <span class="ow">and</span> <span class="n">auth_mechanism</span> <span class="o">==</span> <span class="s1">'GSSAPI'</span><span class="p">:</span> |
| <span class="k">try</span><span class="p">:</span> |
| <span class="kn">import</span> <span class="nn">saslwrapper</span> <span class="k">as</span> <span class="nn">sasl</span> |
| <span class="k">except</span> <span class="ne">ImportError</span><span class="p">:</span> |
| <span class="kn">import</span> <span class="nn">sasl</span> |
| |
| <span class="k">def</span> <span class="nf">sasl_factory</span><span class="p">():</span> |
| <span class="n">sasl_client</span> <span class="o">=</span> <span class="n">sasl</span><span class="o">.</span><span class="n">Client</span><span class="p">()</span> |
| <span class="n">sasl_client</span><span class="o">.</span><span class="n">setAttr</span><span class="p">(</span><span class="s2">"host"</span><span class="p">,</span> <span class="n">ms</span><span class="o">.</span><span class="n">host</span><span class="p">)</span> |
| <span class="n">sasl_client</span><span class="o">.</span><span class="n">setAttr</span><span class="p">(</span><span class="s2">"service"</span><span class="p">,</span> <span class="n">kerberos_service_name</span><span class="p">)</span> |
| <span class="n">sasl_client</span><span class="o">.</span><span class="n">init</span><span class="p">()</span> |
| <span class="k">return</span> <span class="n">sasl_client</span> |
| |
| <span class="kn">from</span> <span class="nn">thrift_sasl</span> <span class="k">import</span> <span class="n">TSaslClientTransport</span> |
| <span class="n">transport</span> <span class="o">=</span> <span class="n">TSaslClientTransport</span><span class="p">(</span><span class="n">sasl_factory</span><span class="p">,</span> <span class="s2">"GSSAPI"</span><span class="p">,</span> <span class="n">socket</span><span class="p">)</span> |
| <span class="k">else</span><span class="p">:</span> |
| <span class="n">transport</span> <span class="o">=</span> <span class="n">TTransport</span><span class="o">.</span><span class="n">TBufferedTransport</span><span class="p">(</span><span class="n">socket</span><span class="p">)</span> |
| |
| <span class="n">protocol</span> <span class="o">=</span> <span class="n">TBinaryProtocol</span><span class="o">.</span><span class="n">TBinaryProtocol</span><span class="p">(</span><span class="n">transport</span><span class="p">)</span> |
| |
| <span class="k">return</span> <span class="n">hmsclient</span><span class="o">.</span><span class="n">HMSClient</span><span class="p">(</span><span class="n">iprot</span><span class="o">=</span><span class="n">protocol</span><span class="p">)</span></div> |
| |
| <div class="viewcode-block" id="HiveMetastoreHook.get_conn"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveMetastoreHook.get_conn">[docs]</a> <span class="k">def</span> <span class="nf">get_conn</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span> |
| <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span></div> |
| |
| <div class="viewcode-block" id="HiveMetastoreHook.check_for_partition"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveMetastoreHook.check_for_partition">[docs]</a> <span class="k">def</span> <span class="nf">check_for_partition</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">schema</span><span class="p">,</span> <span class="n">table</span><span class="p">,</span> <span class="n">partition</span><span class="p">):</span> |
| <span class="sd">"""</span> |
| <span class="sd"> Checks whether a partition exists</span> |
| |
| <span class="sd"> :param schema: Name of hive schema (database) @table belongs to</span> |
| <span class="sd"> :type schema: str</span> |
| <span class="sd"> :param table: Name of hive table @partition belongs to</span> |
| <span class="sd"> :type table: str</span>
| <span class="sd"> :param partition: Expression that matches the partitions to check for</span>
| <span class="sd"> (eg `a = 'b' AND c = 'd'`)</span>
| <span class="sd"> :type partition: str</span>
| <span class="sd"> :rtype: bool</span> |
| |
| <span class="sd"> >>> hh = HiveMetastoreHook()</span> |
| <span class="sd"> >>> t = 'static_babynames_partitioned'</span> |
| <span class="sd"> >>> hh.check_for_partition('airflow', t, "ds='2015-01-01'")</span> |
| <span class="sd"> True</span> |
| <span class="sd"> """</span> |
| <span class="k">with</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span> <span class="k">as</span> <span class="n">client</span><span class="p">:</span> |
| <span class="n">partitions</span> <span class="o">=</span> <span class="n">client</span><span class="o">.</span><span class="n">get_partitions_by_filter</span><span class="p">(</span> |
| <span class="n">schema</span><span class="p">,</span> <span class="n">table</span><span class="p">,</span> <span class="n">partition</span><span class="p">,</span> <span class="mi">1</span><span class="p">)</span> |
| |
| <span class="k">if</span> <span class="n">partitions</span><span class="p">:</span> |
| <span class="k">return</span> <span class="kc">True</span> |
| <span class="k">else</span><span class="p">:</span> |
| <span class="k">return</span> <span class="kc">False</span></div> |
| |
| <div class="viewcode-block" id="HiveMetastoreHook.check_for_named_partition"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveMetastoreHook.check_for_named_partition">[docs]</a> <span class="k">def</span> <span class="nf">check_for_named_partition</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">schema</span><span class="p">,</span> <span class="n">table</span><span class="p">,</span> <span class="n">partition_name</span><span class="p">):</span> |
| <span class="sd">"""</span> |
| <span class="sd"> Checks whether a partition with a given name exists</span> |
| |
| <span class="sd"> :param schema: Name of hive schema (database) @table belongs to</span> |
| <span class="sd"> :type schema: str</span> |
| <span class="sd"> :param table: Name of hive table @partition belongs to</span> |
| <span class="sd"> :type table: str</span>
| <span class="sd"> :param partition_name: Name of the partitions to check for (eg `a=b/c=d`)</span>
| <span class="sd"> :type partition_name: str</span>
| <span class="sd"> :rtype: bool</span> |
| |
| <span class="sd"> >>> hh = HiveMetastoreHook()</span> |
| <span class="sd"> >>> t = 'static_babynames_partitioned'</span> |
| <span class="sd"> >>> hh.check_for_named_partition('airflow', t, "ds=2015-01-01")</span> |
| <span class="sd"> True</span> |
| <span class="sd"> >>> hh.check_for_named_partition('airflow', t, "ds=xxx")</span> |
| <span class="sd"> False</span> |
| <span class="sd"> """</span> |
| <span class="k">with</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span> <span class="k">as</span> <span class="n">client</span><span class="p">:</span> |
| <span class="k">return</span> <span class="n">client</span><span class="o">.</span><span class="n">check_for_named_partition</span><span class="p">(</span><span class="n">schema</span><span class="p">,</span> <span class="n">table</span><span class="p">,</span> <span class="n">partition_name</span><span class="p">)</span></div> |
| |
| <div class="viewcode-block" id="HiveMetastoreHook.get_table"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveMetastoreHook.get_table">[docs]</a> <span class="k">def</span> <span class="nf">get_table</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">table_name</span><span class="p">,</span> <span class="n">db</span><span class="o">=</span><span class="s1">'default'</span><span class="p">):</span> |
| <span class="sd">"""Get a metastore table object</span> |
| |
| <span class="sd"> >>> hh = HiveMetastoreHook()</span> |
| <span class="sd"> >>> t = hh.get_table(db='airflow', table_name='static_babynames')</span> |
| <span class="sd"> >>> t.tableName</span> |
| <span class="sd"> 'static_babynames'</span> |
| <span class="sd"> >>> [col.name for col in t.sd.cols]</span> |
| <span class="sd"> ['state', 'year', 'name', 'gender', 'num']</span> |
| <span class="sd"> """</span> |
| <span class="k">if</span> <span class="n">db</span> <span class="o">==</span> <span class="s1">'default'</span> <span class="ow">and</span> <span class="s1">'.'</span> <span class="ow">in</span> <span class="n">table_name</span><span class="p">:</span> |
| <span class="n">db</span><span class="p">,</span> <span class="n">table_name</span> <span class="o">=</span> <span class="n">table_name</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="s1">'.'</span><span class="p">)[:</span><span class="mi">2</span><span class="p">]</span> |
| <span class="k">with</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span> <span class="k">as</span> <span class="n">client</span><span class="p">:</span> |
| <span class="k">return</span> <span class="n">client</span><span class="o">.</span><span class="n">get_table</span><span class="p">(</span><span class="n">dbname</span><span class="o">=</span><span class="n">db</span><span class="p">,</span> <span class="n">tbl_name</span><span class="o">=</span><span class="n">table_name</span><span class="p">)</span></div> |
| |
| <div class="viewcode-block" id="HiveMetastoreHook.get_tables"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveMetastoreHook.get_tables">[docs]</a> <span class="k">def</span> <span class="nf">get_tables</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">db</span><span class="p">,</span> <span class="n">pattern</span><span class="o">=</span><span class="s1">'*'</span><span class="p">):</span> |
| <span class="sd">"""</span> |
| <span class="sd"> Get metastore table objects for tables matching the pattern</span>
| <span class="sd"> """</span> |
| <span class="k">with</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span> <span class="k">as</span> <span class="n">client</span><span class="p">:</span> |
| <span class="n">tables</span> <span class="o">=</span> <span class="n">client</span><span class="o">.</span><span class="n">get_tables</span><span class="p">(</span><span class="n">db_name</span><span class="o">=</span><span class="n">db</span><span class="p">,</span> <span class="n">pattern</span><span class="o">=</span><span class="n">pattern</span><span class="p">)</span> |
| <span class="k">return</span> <span class="n">client</span><span class="o">.</span><span class="n">get_table_objects_by_name</span><span class="p">(</span><span class="n">db</span><span class="p">,</span> <span class="n">tables</span><span class="p">)</span></div> |
| |
| <div class="viewcode-block" id="HiveMetastoreHook.get_databases"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveMetastoreHook.get_databases">[docs]</a> <span class="k">def</span> <span class="nf">get_databases</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">pattern</span><span class="o">=</span><span class="s1">'*'</span><span class="p">):</span> |
| <span class="sd">"""</span> |
| <span class="sd"> Get the names of metastore databases matching the pattern</span>
| <span class="sd"> """</span> |
| <span class="k">with</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span> <span class="k">as</span> <span class="n">client</span><span class="p">:</span> |
| <span class="k">return</span> <span class="n">client</span><span class="o">.</span><span class="n">get_databases</span><span class="p">(</span><span class="n">pattern</span><span class="p">)</span></div> |
| |
| <div class="viewcode-block" id="HiveMetastoreHook.get_partitions"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveMetastoreHook.get_partitions">[docs]</a> <span class="k">def</span> <span class="nf">get_partitions</span><span class="p">(</span> |
| <span class="bp">self</span><span class="p">,</span> <span class="n">schema</span><span class="p">,</span> <span class="n">table_name</span><span class="p">,</span> <span class="nb">filter</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span> |
| <span class="sd">"""</span> |
| <span class="sd"> Returns a list of all partitions in a table. Works only</span> |
| <span class="sd"> for tables with less than 32767 (java short max val).</span> |
| <span class="sd"> For subpartitioned table, the number might easily exceed this.</span> |
| |
| <span class="sd"> >>> hh = HiveMetastoreHook()</span> |
| <span class="sd"> >>> t = 'static_babynames_partitioned'</span> |
| <span class="sd"> >>> parts = hh.get_partitions(schema='airflow', table_name=t)</span> |
| <span class="sd"> >>> len(parts)</span> |
| <span class="sd"> 1</span> |
| <span class="sd"> >>> parts</span> |
| <span class="sd"> [{'ds': '2015-01-01'}]</span> |
| <span class="sd"> """</span> |
| <span class="k">with</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span> <span class="k">as</span> <span class="n">client</span><span class="p">:</span> |
| <span class="n">table</span> <span class="o">=</span> <span class="n">client</span><span class="o">.</span><span class="n">get_table</span><span class="p">(</span><span class="n">dbname</span><span class="o">=</span><span class="n">schema</span><span class="p">,</span> <span class="n">tbl_name</span><span class="o">=</span><span class="n">table_name</span><span class="p">)</span> |
| <span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">table</span><span class="o">.</span><span class="n">partitionKeys</span><span class="p">)</span> <span class="o">==</span> <span class="mi">0</span><span class="p">:</span> |
| <span class="k">raise</span> <span class="n">AirflowException</span><span class="p">(</span><span class="s2">"The table isn't partitioned"</span><span class="p">)</span> |
| <span class="k">else</span><span class="p">:</span> |
| <span class="k">if</span> <span class="nb">filter</span><span class="p">:</span> |
| <span class="n">parts</span> <span class="o">=</span> <span class="n">client</span><span class="o">.</span><span class="n">get_partitions_by_filter</span><span class="p">(</span> |
| <span class="n">db_name</span><span class="o">=</span><span class="n">schema</span><span class="p">,</span> <span class="n">tbl_name</span><span class="o">=</span><span class="n">table_name</span><span class="p">,</span> |
| <span class="nb">filter</span><span class="o">=</span><span class="nb">filter</span><span class="p">,</span> <span class="n">max_parts</span><span class="o">=</span><span class="n">HiveMetastoreHook</span><span class="o">.</span><span class="n">MAX_PART_COUNT</span><span class="p">)</span> |
| <span class="k">else</span><span class="p">:</span> |
| <span class="n">parts</span> <span class="o">=</span> <span class="n">client</span><span class="o">.</span><span class="n">get_partitions</span><span class="p">(</span> |
| <span class="n">db_name</span><span class="o">=</span><span class="n">schema</span><span class="p">,</span> <span class="n">tbl_name</span><span class="o">=</span><span class="n">table_name</span><span class="p">,</span> |
| <span class="n">max_parts</span><span class="o">=</span><span class="n">HiveMetastoreHook</span><span class="o">.</span><span class="n">MAX_PART_COUNT</span><span class="p">)</span> |
| |
| <span class="n">pnames</span> <span class="o">=</span> <span class="p">[</span><span class="n">p</span><span class="o">.</span><span class="n">name</span> <span class="k">for</span> <span class="n">p</span> <span class="ow">in</span> <span class="n">table</span><span class="o">.</span><span class="n">partitionKeys</span><span class="p">]</span> |
| <span class="k">return</span> <span class="p">[</span><span class="nb">dict</span><span class="p">(</span><span class="nb">zip</span><span class="p">(</span><span class="n">pnames</span><span class="p">,</span> <span class="n">p</span><span class="o">.</span><span class="n">values</span><span class="p">))</span> <span class="k">for</span> <span class="n">p</span> <span class="ow">in</span> <span class="n">parts</span><span class="p">]</span></div> |
| |
| <span class="nd">@staticmethod</span> |
| <div class="viewcode-block" id="HiveMetastoreHook._get_max_partition_from_part_specs"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveMetastoreHook._get_max_partition_from_part_specs">[docs]</a> <span class="k">def</span> <span class="nf">_get_max_partition_from_part_specs</span><span class="p">(</span><span class="n">part_specs</span><span class="p">,</span> <span class="n">partition_key</span><span class="p">,</span> <span class="n">filter_map</span><span class="p">):</span> |
| <span class="sd">"""</span> |
| <span class="sd"> Helper method to get max partition of partitions with partition_key</span> |
| <span class="sd"> from part specs. key:value pair in filter_map will be used to</span> |
| <span class="sd"> filter out partitions.</span> |
| |
| <span class="sd"> :param part_specs: list of partition specs.</span> |
| <span class="sd"> :type part_specs: list</span> |
| <span class="sd"> :param partition_key: partition key name.</span> |
| <span class="sd"> :type partition_key: str</span> |
| <span class="sd"> :param filter_map: partition_key:partition_value map used for partition filtering,</span> |
| <span class="sd"> e.g. {'key1': 'value1', 'key2': 'value2'}.</span> |
| <span class="sd"> Only partitions matching all partition_key:partition_value</span> |
| <span class="sd"> pairs will be considered as candidates of max partition.</span> |
| <span class="sd"> :type filter_map: map</span> |
| <span class="sd"> :return: Max partition or None if part_specs is empty.</span> |
| <span class="sd"> """</span> |
| <span class="k">if</span> <span class="ow">not</span> <span class="n">part_specs</span><span class="p">:</span> |
| <span class="k">return</span> <span class="kc">None</span> |
| |
| <span class="c1"># Assuming all specs have the same keys.</span> |
| <span class="k">if</span> <span class="n">partition_key</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">part_specs</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span><span class="o">.</span><span class="n">keys</span><span class="p">():</span> |
| <span class="k">raise</span> <span class="n">AirflowException</span><span class="p">(</span><span class="s2">"Provided partition_key </span><span class="si">{}</span><span class="s2"> "</span> |
| <span class="s2">"is not in part_specs."</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">partition_key</span><span class="p">))</span> |
| <span class="k">if</span> <span class="n">filter_map</span><span class="p">:</span> |
| <span class="n">is_subset</span> <span class="o">=</span> <span class="nb">set</span><span class="p">(</span><span class="n">filter_map</span><span class="o">.</span><span class="n">keys</span><span class="p">())</span><span class="o">.</span><span class="n">issubset</span><span class="p">(</span><span class="nb">set</span><span class="p">(</span><span class="n">part_specs</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span><span class="o">.</span><span class="n">keys</span><span class="p">()))</span> |
| <span class="k">if</span> <span class="n">filter_map</span> <span class="ow">and</span> <span class="ow">not</span> <span class="n">is_subset</span><span class="p">:</span> |
| <span class="k">raise</span> <span class="n">AirflowException</span><span class="p">(</span><span class="s2">"Keys in provided filter_map </span><span class="si">{}</span><span class="s2"> "</span> |
| <span class="s2">"are not subset of part_spec keys: </span><span class="si">{}</span><span class="s2">"</span> |
| <span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="s1">', '</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">filter_map</span><span class="o">.</span><span class="n">keys</span><span class="p">()),</span> |
| <span class="s1">', '</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">part_specs</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span><span class="o">.</span><span class="n">keys</span><span class="p">())))</span> |
| |
| <span class="n">candidates</span> <span class="o">=</span> <span class="p">[</span><span class="n">p_dict</span><span class="p">[</span><span class="n">partition_key</span><span class="p">]</span> <span class="k">for</span> <span class="n">p_dict</span> <span class="ow">in</span> <span class="n">part_specs</span> |
| <span class="k">if</span> <span class="n">filter_map</span> <span class="ow">is</span> <span class="kc">None</span> <span class="ow">or</span> |
| <span class="nb">all</span><span class="p">(</span><span class="n">item</span> <span class="ow">in</span> <span class="n">p_dict</span><span class="o">.</span><span class="n">items</span><span class="p">()</span> <span class="k">for</span> <span class="n">item</span> <span class="ow">in</span> <span class="n">filter_map</span><span class="o">.</span><span class="n">items</span><span class="p">())]</span> |
| |
| <span class="k">if</span> <span class="ow">not</span> <span class="n">candidates</span><span class="p">:</span> |
| <span class="k">return</span> <span class="kc">None</span> |
| <span class="k">else</span><span class="p">:</span> |
| <span class="k">return</span> <span class="nb">max</span><span class="p">(</span><span class="n">candidates</span><span class="p">)</span><span class="o">.</span><span class="n">encode</span><span class="p">(</span><span class="s1">'utf-8'</span><span class="p">)</span></div> |
| |
| <div class="viewcode-block" id="HiveMetastoreHook.max_partition"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveMetastoreHook.max_partition">[docs]</a> <span class="k">def</span> <span class="nf">max_partition</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">schema</span><span class="p">,</span> <span class="n">table_name</span><span class="p">,</span> <span class="n">field</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">filter_map</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span> |
| <span class="sd">"""</span> |
| <span class="sd"> Returns the maximum value for all partitions with given field in a table.</span> |
| <span class="sd"> If only one partition key exists in the table, the key will be used as field.</span> |
| <span class="sd"> filter_map should be a partition_key:partition_value map and will be used to</span> |
| <span class="sd"> filter out partitions.</span> |
| |
| <span class="sd"> :param schema: schema name.</span> |
| <span class="sd"> :type schema: str</span> |
| <span class="sd"> :param table_name: table name.</span> |
| <span class="sd"> :type table_name: str</span> |
| <span class="sd"> :param field: partition key to get max partition from.</span> |
| <span class="sd"> :type field: str</span> |
| <span class="sd"> :param filter_map: partition_key:partition_value map used for partition filtering.</span> |
| <span class="sd"> :type filter_map: map</span> |
| |
| <span class="sd"> >>> hh = HiveMetastoreHook()</span> |
| <span class="sd"> >>> filter_map = {'ds': '2015-01-01'}</span> |
| <span class="sd"> >>> t = 'static_babynames_partitioned'</span> |
| <span class="sd"> >>> hh.max_partition(schema='airflow',\</span> |
| <span class="sd"> ... table_name=t, field='ds', filter_map=filter_map)</span> |
| <span class="sd"> '2015-01-01'</span> |
| <span class="sd"> """</span> |
| <span class="k">with</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span> <span class="k">as</span> <span class="n">client</span><span class="p">:</span> |
| <span class="n">table</span> <span class="o">=</span> <span class="n">client</span><span class="o">.</span><span class="n">get_table</span><span class="p">(</span><span class="n">dbname</span><span class="o">=</span><span class="n">schema</span><span class="p">,</span> <span class="n">tbl_name</span><span class="o">=</span><span class="n">table_name</span><span class="p">)</span> |
| <span class="n">key_name_set</span> <span class="o">=</span> <span class="nb">set</span><span class="p">(</span><span class="n">key</span><span class="o">.</span><span class="n">name</span> <span class="k">for</span> <span class="n">key</span> <span class="ow">in</span> <span class="n">table</span><span class="o">.</span><span class="n">partitionKeys</span><span class="p">)</span> |
| <span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">table</span><span class="o">.</span><span class="n">partitionKeys</span><span class="p">)</span> <span class="o">==</span> <span class="mi">1</span><span class="p">:</span> |
| <span class="n">field</span> <span class="o">=</span> <span class="n">table</span><span class="o">.</span><span class="n">partitionKeys</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span><span class="o">.</span><span class="n">name</span> |
| <span class="k">elif</span> <span class="ow">not</span> <span class="n">field</span><span class="p">:</span> |
| <span class="k">raise</span> <span class="n">AirflowException</span><span class="p">(</span><span class="s2">"Please specify the field you want the max "</span> |
| <span class="s2">"value for."</span><span class="p">)</span> |
| <span class="k">elif</span> <span class="n">field</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">key_name_set</span><span class="p">:</span> |
| <span class="k">raise</span> <span class="n">AirflowException</span><span class="p">(</span><span class="s2">"Provided field is not a partition key."</span><span class="p">)</span> |
| |
| <span class="k">if</span> <span class="n">filter_map</span> <span class="ow">and</span> <span class="ow">not</span> <span class="nb">set</span><span class="p">(</span><span class="n">filter_map</span><span class="o">.</span><span class="n">keys</span><span class="p">())</span><span class="o">.</span><span class="n">issubset</span><span class="p">(</span><span class="n">key_name_set</span><span class="p">):</span> |
| <span class="k">raise</span> <span class="n">AirflowException</span><span class="p">(</span><span class="s2">"Provided filter_map contains keys "</span> |
| <span class="s2">"that are not partition key."</span><span class="p">)</span> |
| |
| <span class="n">part_names</span> <span class="o">=</span> \ |
| <span class="n">client</span><span class="o">.</span><span class="n">get_partition_names</span><span class="p">(</span><span class="n">schema</span><span class="p">,</span> |
| <span class="n">table_name</span><span class="p">,</span> |
| <span class="n">max_parts</span><span class="o">=</span><span class="n">HiveMetastoreHook</span><span class="o">.</span><span class="n">MAX_PART_COUNT</span><span class="p">)</span> |
| <span class="n">part_specs</span> <span class="o">=</span> <span class="p">[</span><span class="n">client</span><span class="o">.</span><span class="n">partition_name_to_spec</span><span class="p">(</span><span class="n">part_name</span><span class="p">)</span> |
| <span class="k">for</span> <span class="n">part_name</span> <span class="ow">in</span> <span class="n">part_names</span><span class="p">]</span> |
| |
| <span class="k">return</span> <span class="n">HiveMetastoreHook</span><span class="o">.</span><span class="n">_get_max_partition_from_part_specs</span><span class="p">(</span><span class="n">part_specs</span><span class="p">,</span> |
| <span class="n">field</span><span class="p">,</span> |
| <span class="n">filter_map</span><span class="p">)</span></div> |
| |
| <div class="viewcode-block" id="HiveMetastoreHook.table_exists"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveMetastoreHook.table_exists">[docs]</a> <span class="k">def</span> <span class="nf">table_exists</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">table_name</span><span class="p">,</span> <span class="n">db</span><span class="o">=</span><span class="s1">'default'</span><span class="p">):</span> |
| <span class="sd">"""</span> |
| <span class="sd"> Check if table exists</span> |
| |
| <span class="sd"> >>> hh = HiveMetastoreHook()</span> |
| <span class="sd"> >>> hh.table_exists(db='airflow', table_name='static_babynames')</span> |
| <span class="sd"> True</span> |
| <span class="sd"> >>> hh.table_exists(db='airflow', table_name='does_not_exist')</span> |
| <span class="sd"> False</span> |
| <span class="sd"> """</span> |
| <span class="k">try</span><span class="p">:</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">get_table</span><span class="p">(</span><span class="n">table_name</span><span class="p">,</span> <span class="n">db</span><span class="p">)</span> |
| <span class="k">return</span> <span class="kc">True</span> |
| <span class="k">except</span> <span class="ne">Exception</span><span class="p">:</span> |
| <span class="k">return</span> <span class="kc">False</span></div></div> |
| |
| |
| <div class="viewcode-block" id="HiveServer2Hook"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveServer2Hook">[docs]</a><span class="k">class</span> <span class="nc">HiveServer2Hook</span><span class="p">(</span><span class="n">BaseHook</span><span class="p">):</span> |
| <span class="sd">"""</span> |
| <span class="sd"> Wrapper around the pyhive library</span> |
| |
| <span class="sd"> Note that the default authMechanism is PLAIN, to override it you</span> |
| <span class="sd"> can specify it in the ``extra`` of your connection in the UI as in</span> |
| <span class="sd"> """</span> |
| <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">hiveserver2_conn_id</span><span class="o">=</span><span class="s1">'hiveserver2_default'</span><span class="p">):</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">hiveserver2_conn_id</span> <span class="o">=</span> <span class="n">hiveserver2_conn_id</span> |
| |
| <div class="viewcode-block" id="HiveServer2Hook.get_conn"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveServer2Hook.get_conn">[docs]</a> <span class="k">def</span> <span class="nf">get_conn</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">schema</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span> |
| <span class="sd">"""</span> |
| <span class="sd"> Returns a Hive connection object.</span> |
| <span class="sd"> """</span> |
| <span class="n">db</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_connection</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">hiveserver2_conn_id</span><span class="p">)</span> |
| <span class="n">auth_mechanism</span> <span class="o">=</span> <span class="n">db</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'authMechanism'</span><span class="p">,</span> <span class="s1">'NONE'</span><span class="p">)</span> |
| <span class="k">if</span> <span class="n">auth_mechanism</span> <span class="o">==</span> <span class="s1">'NONE'</span> <span class="ow">and</span> <span class="n">db</span><span class="o">.</span><span class="n">login</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span> |
| <span class="c1"># we need to give a username</span> |
| <span class="n">username</span> <span class="o">=</span> <span class="s1">'airflow'</span> |
| <span class="n">kerberos_service_name</span> <span class="o">=</span> <span class="kc">None</span> |
| <span class="k">if</span> <span class="n">configuration</span><span class="o">.</span><span class="n">conf</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'core'</span><span class="p">,</span> <span class="s1">'security'</span><span class="p">)</span> <span class="o">==</span> <span class="s1">'kerberos'</span><span class="p">:</span> |
| <span class="n">auth_mechanism</span> <span class="o">=</span> <span class="n">db</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'authMechanism'</span><span class="p">,</span> <span class="s1">'KERBEROS'</span><span class="p">)</span> |
| <span class="n">kerberos_service_name</span> <span class="o">=</span> <span class="n">db</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'kerberos_service_name'</span><span class="p">,</span> <span class="s1">'hive'</span><span class="p">)</span> |
| |
| <span class="c1"># pyhive uses GSSAPI instead of KERBEROS as a auth_mechanism identifier</span> |
| <span class="k">if</span> <span class="n">auth_mechanism</span> <span class="o">==</span> <span class="s1">'GSSAPI'</span><span class="p">:</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">warning</span><span class="p">(</span> |
| <span class="s2">"Detected deprecated 'GSSAPI' for authMechanism "</span> |
| <span class="s2">"for </span><span class="si">%s</span><span class="s2">. Please use 'KERBEROS' instead"</span><span class="p">,</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">hiveserver2_conn_id</span> |
| <span class="p">)</span> |
| <span class="n">auth_mechanism</span> <span class="o">=</span> <span class="s1">'KERBEROS'</span> |
| |
| <span class="kn">from</span> <span class="nn">pyhive.hive</span> <span class="k">import</span> <span class="n">connect</span> |
| <span class="k">return</span> <span class="n">connect</span><span class="p">(</span> |
| <span class="n">host</span><span class="o">=</span><span class="n">db</span><span class="o">.</span><span class="n">host</span><span class="p">,</span> |
| <span class="n">port</span><span class="o">=</span><span class="n">db</span><span class="o">.</span><span class="n">port</span><span class="p">,</span> |
| <span class="n">auth</span><span class="o">=</span><span class="n">auth_mechanism</span><span class="p">,</span> |
| <span class="n">kerberos_service_name</span><span class="o">=</span><span class="n">kerberos_service_name</span><span class="p">,</span> |
| <span class="n">username</span><span class="o">=</span><span class="n">db</span><span class="o">.</span><span class="n">login</span> <span class="ow">or</span> <span class="n">username</span><span class="p">,</span> |
| <span class="n">database</span><span class="o">=</span><span class="n">schema</span> <span class="ow">or</span> <span class="n">db</span><span class="o">.</span><span class="n">schema</span> <span class="ow">or</span> <span class="s1">'default'</span><span class="p">)</span></div> |
| |
| <div class="viewcode-block" id="HiveServer2Hook._get_results"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveServer2Hook._get_results">[docs]</a> <span class="k">def</span> <span class="nf">_get_results</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">hql</span><span class="p">,</span> <span class="n">schema</span><span class="o">=</span><span class="s1">'default'</span><span class="p">,</span> <span class="n">fetch_size</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">hive_conf</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span> |
| <span class="kn">from</span> <span class="nn">pyhive.exc</span> <span class="k">import</span> <span class="n">ProgrammingError</span> |
| <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">hql</span><span class="p">,</span> <span class="n">basestring</span><span class="p">):</span> |
| <span class="n">hql</span> <span class="o">=</span> <span class="p">[</span><span class="n">hql</span><span class="p">]</span> |
| <span class="n">previous_description</span> <span class="o">=</span> <span class="kc">None</span> |
| <span class="k">with</span> <span class="n">contextlib</span><span class="o">.</span><span class="n">closing</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">get_conn</span><span class="p">(</span><span class="n">schema</span><span class="p">))</span> <span class="k">as</span> <span class="n">conn</span><span class="p">,</span> \ |
| <span class="n">contextlib</span><span class="o">.</span><span class="n">closing</span><span class="p">(</span><span class="n">conn</span><span class="o">.</span><span class="n">cursor</span><span class="p">())</span> <span class="k">as</span> <span class="n">cur</span><span class="p">:</span> |
| <span class="n">cur</span><span class="o">.</span><span class="n">arraysize</span> <span class="o">=</span> <span class="n">fetch_size</span> <span class="ow">or</span> <span class="mi">1000</span> |
| |
| <span class="n">env_context</span> <span class="o">=</span> <span class="n">get_context_from_env_var</span><span class="p">()</span> |
| <span class="k">if</span> <span class="n">hive_conf</span><span class="p">:</span> |
| <span class="n">env_context</span><span class="o">.</span><span class="n">update</span><span class="p">(</span><span class="n">hive_conf</span><span class="p">)</span> |
| <span class="k">for</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span> <span class="ow">in</span> <span class="n">env_context</span><span class="o">.</span><span class="n">items</span><span class="p">():</span> |
| <span class="n">cur</span><span class="o">.</span><span class="n">execute</span><span class="p">(</span><span class="s2">"set </span><span class="si">{}</span><span class="s2">=</span><span class="si">{}</span><span class="s2">"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">k</span><span class="p">,</span> <span class="n">v</span><span class="p">))</span> |
| |
| <span class="k">for</span> <span class="n">statement</span> <span class="ow">in</span> <span class="n">hql</span><span class="p">:</span> |
| <span class="n">cur</span><span class="o">.</span><span class="n">execute</span><span class="p">(</span><span class="n">statement</span><span class="p">)</span> |
| <span class="c1"># we only get results of statements that returns</span> |
| <span class="n">lowered_statement</span> <span class="o">=</span> <span class="n">statement</span><span class="o">.</span><span class="n">lower</span><span class="p">()</span><span class="o">.</span><span class="n">strip</span><span class="p">()</span> |
| <span class="k">if</span> <span class="p">(</span><span class="n">lowered_statement</span><span class="o">.</span><span class="n">startswith</span><span class="p">(</span><span class="s1">'select'</span><span class="p">)</span> <span class="ow">or</span> |
| <span class="n">lowered_statement</span><span class="o">.</span><span class="n">startswith</span><span class="p">(</span><span class="s1">'with'</span><span class="p">)</span> <span class="ow">or</span> |
| <span class="p">(</span><span class="n">lowered_statement</span><span class="o">.</span><span class="n">startswith</span><span class="p">(</span><span class="s1">'set'</span><span class="p">)</span> <span class="ow">and</span> |
| <span class="s1">'='</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">lowered_statement</span><span class="p">)):</span> |
| <span class="n">description</span> <span class="o">=</span> <span class="p">[</span><span class="n">c</span> <span class="k">for</span> <span class="n">c</span> <span class="ow">in</span> <span class="n">cur</span><span class="o">.</span><span class="n">description</span><span class="p">]</span> |
| <span class="k">if</span> <span class="n">previous_description</span> <span class="ow">and</span> <span class="n">previous_description</span> <span class="o">!=</span> <span class="n">description</span><span class="p">:</span> |
| <span class="n">message</span> <span class="o">=</span> <span class="s1">'''The statements are producing different descriptions:</span> |
| <span class="s1"> Current: </span><span class="si">{}</span><span class="s1"></span> |
| <span class="s1"> Previous: </span><span class="si">{}</span><span class="s1">'''</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="nb">repr</span><span class="p">(</span><span class="n">description</span><span class="p">),</span> |
| <span class="nb">repr</span><span class="p">(</span><span class="n">previous_description</span><span class="p">))</span> |
| <span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="n">message</span><span class="p">)</span> |
| <span class="k">elif</span> <span class="ow">not</span> <span class="n">previous_description</span><span class="p">:</span> |
| <span class="n">previous_description</span> <span class="o">=</span> <span class="n">description</span> |
| <span class="k">yield</span> <span class="n">description</span> |
| <span class="k">try</span><span class="p">:</span> |
| <span class="c1"># DB API 2 raises when no results are returned</span> |
| <span class="c1"># we're silencing here as some statements in the list</span> |
| <span class="c1"># may be `SET` or DDL</span> |
| <span class="k">for</span> <span class="n">row</span> <span class="ow">in</span> <span class="n">cur</span><span class="p">:</span> |
| <span class="k">yield</span> <span class="n">row</span> |
| <span class="k">except</span> <span class="n">ProgrammingError</span><span class="p">:</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">debug</span><span class="p">(</span><span class="s2">"get_results returned no records"</span><span class="p">)</span></div> |
| |
| <div class="viewcode-block" id="HiveServer2Hook.get_results"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveServer2Hook.get_results">[docs]</a> <span class="k">def</span> <span class="nf">get_results</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">hql</span><span class="p">,</span> <span class="n">schema</span><span class="o">=</span><span class="s1">'default'</span><span class="p">,</span> <span class="n">fetch_size</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">hive_conf</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span> |
| <span class="sd">"""</span> |
| <span class="sd"> Get results of the provided hql in target schema.</span> |
| |
| <span class="sd"> :param hql: hql to be executed.</span> |
| <span class="sd"> :type hql: str or list</span> |
| <span class="sd"> :param schema: target schema, default to 'default'.</span> |
| <span class="sd"> :type schema: str</span> |
| <span class="sd"> :param fetch_size: max size of result to fetch.</span> |
| <span class="sd"> :type fetch_size: int</span> |
| <span class="sd"> :param hive_conf: hive_conf to execute along with the hql.</span> |
| <span class="sd"> :type hive_conf: dict</span> |
| <span class="sd"> :return: results of hql execution, dict with data (list of results) and header</span> |
| <span class="sd"> :rtype: dict</span> |
| <span class="sd"> """</span> |
| <span class="n">results_iter</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_get_results</span><span class="p">(</span><span class="n">hql</span><span class="p">,</span> <span class="n">schema</span><span class="p">,</span> |
| <span class="n">fetch_size</span><span class="o">=</span><span class="n">fetch_size</span><span class="p">,</span> <span class="n">hive_conf</span><span class="o">=</span><span class="n">hive_conf</span><span class="p">)</span> |
| <span class="n">header</span> <span class="o">=</span> <span class="nb">next</span><span class="p">(</span><span class="n">results_iter</span><span class="p">)</span> |
| <span class="n">results</span> <span class="o">=</span> <span class="p">{</span> |
| <span class="s1">'data'</span><span class="p">:</span> <span class="nb">list</span><span class="p">(</span><span class="n">results_iter</span><span class="p">),</span> |
| <span class="s1">'header'</span><span class="p">:</span> <span class="n">header</span> |
| <span class="p">}</span> |
| <span class="k">return</span> <span class="n">results</span></div> |
| |
| <div class="viewcode-block" id="HiveServer2Hook.to_csv"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveServer2Hook.to_csv">[docs]</a> <span class="k">def</span> <span class="nf">to_csv</span><span class="p">(</span> |
| <span class="bp">self</span><span class="p">,</span> |
| <span class="n">hql</span><span class="p">,</span> |
| <span class="n">csv_filepath</span><span class="p">,</span> |
| <span class="n">schema</span><span class="o">=</span><span class="s1">'default'</span><span class="p">,</span> |
| <span class="n">delimiter</span><span class="o">=</span><span class="s1">','</span><span class="p">,</span> |
| <span class="n">lineterminator</span><span class="o">=</span><span class="s1">'</span><span class="se">\r\n</span><span class="s1">'</span><span class="p">,</span> |
| <span class="n">output_header</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span> |
| <span class="n">fetch_size</span><span class="o">=</span><span class="mi">1000</span><span class="p">,</span> |
| <span class="n">hive_conf</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span> |
| <span class="sd">"""</span> |
| <span class="sd"> Execute hql in target schema and write results to a csv file.</span> |
| |
| <span class="sd"> :param hql: hql to be executed.</span> |
| <span class="sd"> :type hql: str or list</span> |
| <span class="sd"> :param csv_filepath: filepath of csv to write results into.</span> |
| <span class="sd"> :type csv_filepath: str</span> |
| <span class="sd"> :param schema: target schema, default to 'default'.</span> |
| <span class="sd"> :type schema: str</span> |
| <span class="sd"> :param delimiter: delimiter of the csv file, default to ','.</span> |
| <span class="sd"> :type delimiter: str</span> |
| <span class="sd"> :param lineterminator: lineterminator of the csv file.</span> |
| <span class="sd"> :type lineterminator: str</span> |
| <span class="sd"> :param output_header: header of the csv file, default to True.</span> |
| <span class="sd"> :type output_header: bool</span> |
| <span class="sd"> :param fetch_size: number of result rows to write into the csv file, default to 1000.</span> |
| <span class="sd"> :type fetch_size: int</span> |
| <span class="sd"> :param hive_conf: hive_conf to execute along with the hql.</span> |
| <span class="sd"> :type hive_conf: dict</span> |
| |
| <span class="sd"> """</span> |
| |
| <span class="n">results_iter</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_get_results</span><span class="p">(</span><span class="n">hql</span><span class="p">,</span> <span class="n">schema</span><span class="p">,</span> |
| <span class="n">fetch_size</span><span class="o">=</span><span class="n">fetch_size</span><span class="p">,</span> <span class="n">hive_conf</span><span class="o">=</span><span class="n">hive_conf</span><span class="p">)</span> |
| <span class="n">header</span> <span class="o">=</span> <span class="nb">next</span><span class="p">(</span><span class="n">results_iter</span><span class="p">)</span> |
| <span class="n">message</span> <span class="o">=</span> <span class="kc">None</span> |
| |
| <span class="n">i</span> <span class="o">=</span> <span class="mi">0</span> |
| <span class="k">with</span> <span class="nb">open</span><span class="p">(</span><span class="n">csv_filepath</span><span class="p">,</span> <span class="s1">'wb'</span><span class="p">)</span> <span class="k">as</span> <span class="n">f</span><span class="p">:</span> |
| <span class="n">writer</span> <span class="o">=</span> <span class="n">csv</span><span class="o">.</span><span class="n">writer</span><span class="p">(</span><span class="n">f</span><span class="p">,</span> |
| <span class="n">delimiter</span><span class="o">=</span><span class="n">delimiter</span><span class="p">,</span> |
| <span class="n">lineterminator</span><span class="o">=</span><span class="n">lineterminator</span><span class="p">,</span> |
| <span class="n">encoding</span><span class="o">=</span><span class="s1">'utf-8'</span><span class="p">)</span> |
| <span class="k">try</span><span class="p">:</span> |
| <span class="k">if</span> <span class="n">output_header</span><span class="p">:</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">debug</span><span class="p">(</span><span class="s1">'Cursor description is </span><span class="si">%s</span><span class="s1">'</span><span class="p">,</span> <span class="n">header</span><span class="p">)</span> |
| <span class="n">writer</span><span class="o">.</span><span class="n">writerow</span><span class="p">([</span><span class="n">c</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="k">for</span> <span class="n">c</span> <span class="ow">in</span> <span class="n">header</span><span class="p">])</span> |
| |
| <span class="k">for</span> <span class="n">i</span><span class="p">,</span> <span class="n">row</span> <span class="ow">in</span> <span class="nb">enumerate</span><span class="p">(</span><span class="n">results_iter</span><span class="p">,</span> <span class="mi">1</span><span class="p">):</span> |
| <span class="n">writer</span><span class="o">.</span><span class="n">writerow</span><span class="p">(</span><span class="n">row</span><span class="p">)</span> |
| <span class="k">if</span> <span class="n">i</span> <span class="o">%</span> <span class="n">fetch_size</span> <span class="o">==</span> <span class="mi">0</span><span class="p">:</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">"Written </span><span class="si">%s</span><span class="s2"> rows so far."</span><span class="p">,</span> <span class="n">i</span><span class="p">)</span> |
| <span class="k">except</span> <span class="ne">ValueError</span> <span class="k">as</span> <span class="n">exception</span><span class="p">:</span> |
| <span class="n">message</span> <span class="o">=</span> <span class="nb">str</span><span class="p">(</span><span class="n">exception</span><span class="p">)</span> |
| |
| <span class="k">if</span> <span class="n">message</span><span class="p">:</span> |
| <span class="c1"># need to clean up the file first</span> |
| <span class="n">os</span><span class="o">.</span><span class="n">remove</span><span class="p">(</span><span class="n">csv_filepath</span><span class="p">)</span> |
| <span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="n">message</span><span class="p">)</span> |
| |
| <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">"Done. Loaded a total of </span><span class="si">%s</span><span class="s2"> rows."</span><span class="p">,</span> <span class="n">i</span><span class="p">)</span></div> |
| |
| <div class="viewcode-block" id="HiveServer2Hook.get_records"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveServer2Hook.get_records">[docs]</a> <span class="k">def</span> <span class="nf">get_records</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">hql</span><span class="p">,</span> <span class="n">schema</span><span class="o">=</span><span class="s1">'default'</span><span class="p">):</span> |
| <span class="sd">"""</span> |
| <span class="sd"> Get a set of records from a Hive query.</span> |
| |
| <span class="sd"> :param hql: hql to be executed.</span> |
| <span class="sd"> :type hql: str or list</span> |
| <span class="sd"> :param schema: target schema, default to 'default'.</span>
| <span class="sd"> :type schema: str</span>
| <span class="sd"> :return: result of hive execution</span> |
| <span class="sd"> :rtype: list</span> |
| |
| <span class="sd"> >>> hh = HiveServer2Hook()</span> |
| <span class="sd"> >>> sql = "SELECT * FROM airflow.static_babynames LIMIT 100"</span> |
| <span class="sd"> >>> len(hh.get_records(sql))</span> |
| <span class="sd"> 100</span> |
| <span class="sd"> """</span> |
| <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_results</span><span class="p">(</span><span class="n">hql</span><span class="p">,</span> <span class="n">schema</span><span class="o">=</span><span class="n">schema</span><span class="p">)[</span><span class="s1">'data'</span><span class="p">]</span></div> |
| |
| <div class="viewcode-block" id="HiveServer2Hook.get_pandas_df"><a class="viewcode-back" href="../../../_api/airflow/hooks/hive_hooks/index.html#airflow.hooks.hive_hooks.HiveServer2Hook.get_pandas_df">[docs]</a> <span class="k">def</span> <span class="nf">get_pandas_df</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">hql</span><span class="p">,</span> <span class="n">schema</span><span class="o">=</span><span class="s1">'default'</span><span class="p">):</span> |
| <span class="sd">"""</span> |
| <span class="sd"> Get a pandas dataframe from a Hive query</span> |
| |
| <span class="sd"> :param hql: hql to be executed.</span> |
| <span class="sd"> :type hql: str or list</span> |
| <span class="sd"> :param schema: target schema, default to 'default'.</span> |
| <span class="sd"> :type schema: str</span> |
| <span class="sd"> :return: result of hql execution</span> |
| <span class="sd"> :rtype: DataFrame</span> |
| |
| <span class="sd"> >>> hh = HiveServer2Hook()</span> |
| <span class="sd"> >>> sql = "SELECT * FROM airflow.static_babynames LIMIT 100"</span> |
| <span class="sd"> >>> df = hh.get_pandas_df(sql)</span> |
| <span class="sd"> >>> len(df.index)</span> |
| <span class="sd"> 100</span> |
| |
| <span class="sd"> :return: pandas.DataFrame</span>
| <span class="sd"> """</span> |
| <span class="kn">import</span> <span class="nn">pandas</span> <span class="k">as</span> <span class="nn">pd</span> |
| <span class="n">res</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_results</span><span class="p">(</span><span class="n">hql</span><span class="p">,</span> <span class="n">schema</span><span class="o">=</span><span class="n">schema</span><span class="p">)</span> |
| <span class="n">df</span> <span class="o">=</span> <span class="n">pd</span><span class="o">.</span><span class="n">DataFrame</span><span class="p">(</span><span class="n">res</span><span class="p">[</span><span class="s1">'data'</span><span class="p">])</span> |
| <span class="n">df</span><span class="o">.</span><span class="n">columns</span> <span class="o">=</span> <span class="p">[</span><span class="n">c</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="k">for</span> <span class="n">c</span> <span class="ow">in</span> <span class="n">res</span><span class="p">[</span><span class="s1">'header'</span><span class="p">]]</span> |
| <span class="k">return</span> <span class="n">df</span></div></div> |
| </pre></div> |
| |
| </div> |
| |
| </div> |
| <footer> |
| |
| |
| <hr/> |
| |
| <div role="contentinfo"> |
| <p> |
| |
| </p> |
| </div> |
| Built with <a href="http://sphinx-doc.org/">Sphinx</a> using a <a href="https://github.com/rtfd/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>. |
| |
| </footer> |
| |
| </div> |
| </div> |
| |
| </section> |
| |
| </div> |
| |
| |
| |
| <script type="text/javascript"> |
| jQuery(function () { |
| SphinxRtdTheme.Navigation.enable(true); |
| }); |
| </script> |
| |
| |
| |
| |
| |
| |
| </body> |
| </html> |