| |
| |
| <!-- |
| Javascript to render AIRFLOW-XXX and PR references in text |
| as HTML links. |
| |
| Overrides extrahead block from sphinx_rtd_theme |
| https://www.sphinx-doc.org/en/master/templating.html |
| --> |
| |
| |
| <!DOCTYPE html> |
| <!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]--> |
| <!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]--> |
<head>
  <!-- charset must come first so the parser locks encoding early -->
  <meta charset="utf-8">

  <meta name="viewport" content="width=device-width, initial-scale=1.0">

  <title>airflow.operators.hive_to_druid — Airflow Documentation</title>

  <link rel="shortcut icon" href="../../../_static/pin_32.png">

  <script src="../../../_static/js/modernizr.min.js"></script>

  <!-- documentation_options.js reads data-url_root to resolve relative links -->
  <script id="documentation_options" data-url_root="../../../" src="../../../_static/documentation_options.js"></script>
  <script src="../../../_static/jquery.js"></script>
  <script src="../../../_static/underscore.js"></script>
  <script src="../../../_static/doctools.js"></script>
  <script src="../../../_static/language_data.js"></script>
  <script src="../../../_static/jira-links.js"></script>

  <script src="../../../_static/js/theme.js"></script>

  <link rel="stylesheet" href="../../../_static/css/theme.css">
  <link rel="stylesheet" href="../../../_static/pygments.css">
  <link rel="stylesheet" href="../../../_static/graphviz.css">
  <link rel="stylesheet" href="../../../_static/exampleinclude.css">
  <link rel="index" title="Index" href="../../../genindex.html">
  <link rel="search" title="Search" href="../../../search.html">
</head>
| |
| <body class="wy-body-for-nav"> |
| |
| |
| <div class="wy-grid-for-nav"> |
| |
| <nav data-toggle="wy-nav-shift" class="wy-nav-side"> |
| <div class="wy-side-scroll"> |
| <div class="wy-side-nav-search" > |
| |
| |
| |
| <a href="../../../index.html" class="icon icon-home"> Airflow |
| |
| |
| |
| </a> |
| |
| |
| |
| |
| <div class="version"> |
| 1.10.8 |
| </div> |
| |
| |
| |
| |
| <div role="search"> |
| <form id="rtd-search-form" class="wy-form" action="../../../search.html" method="get"> |
| <input type="text" name="q" placeholder="Search docs" /> |
| <input type="hidden" name="check_keywords" value="yes" /> |
| <input type="hidden" name="area" value="default" /> |
| </form> |
| </div> |
| |
| |
| </div> |
| |
| <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation"> |
| |
| |
| |
| |
| |
| |
| <ul> |
| <li class="toctree-l1"><a class="reference internal" href="../../../project.html">Project</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../license.html">License</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../start.html">Quick Start</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../installation.html">Installation</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../tutorial.html">Tutorial</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../howto/index.html">How-to Guides</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../ui.html">UI / Screenshots</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../concepts.html">Concepts</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../profiling.html">Data Profiling</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../cli.html">Command Line Interface Reference</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../scheduler.html">Scheduling & Triggers</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../executor/index.html">Executor</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../plugins.html">Plugins</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../security.html">Security</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../timezone.html">Time zones</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../api.html">REST API Reference</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../integration.html">Integration</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../metrics.html">Metrics</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../errors.html">Error Tracking</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../kubernetes.html">Kubernetes</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../lineage.html">Lineage</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../dag-serialization.html">DAG Serialization</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../changelog.html">Changelog</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../best-practices.html">Best Practices</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../faq.html">FAQ</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../macros.html">Macros reference</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../privacy_notice.html">Privacy Notice</a></li> |
| </ul> |
| <p class="caption"><span class="caption-text">References</span></p> |
| <ul> |
| <li class="toctree-l1"><a class="reference internal" href="../../../_api/index.html">Python API</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="../../../configurations-ref.html">Configurations</a></li> |
| </ul> |
| |
| |
| |
| </div> |
| </div> |
| </nav> |
| |
| <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap"> |
| |
| |
| <nav class="wy-nav-top" aria-label="top navigation"> |
| |
| <i data-toggle="wy-nav-top" class="fa fa-bars"></i> |
| <a href="../../../index.html">Airflow</a> |
| |
| </nav> |
| |
| |
| <div class="wy-nav-content"> |
| |
| <div class="rst-content"> |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| <div role="navigation" aria-label="breadcrumbs navigation"> |
| |
| <ul class="wy-breadcrumbs"> |
| |
| <li><a href="../../../index.html">Docs</a> »</li> |
| |
| <li><a href="../../index.html">Module code</a> »</li> |
| |
| <li><a href="../operators.html">airflow.operators</a> »</li> |
| |
| <li>airflow.operators.hive_to_druid</li> |
| |
| |
| <li class="wy-breadcrumbs-aside"> |
| |
| </li> |
| |
| </ul> |
| |
| |
| <hr/> |
| </div> |
| <div role="main" class="document" itemscope="itemscope" itemtype="http://schema.org/Article"> |
| <div itemprop="articleBody"> |
| |
| <h1>Source code for airflow.operators.hive_to_druid</h1><div class="highlight"><pre> |
| <span></span><span class="c1"># -*- coding: utf-8 -*-</span> |
| <span class="c1">#</span> |
| <span class="c1"># Licensed to the Apache Software Foundation (ASF) under one</span> |
| <span class="c1"># or more contributor license agreements. See the NOTICE file</span> |
| <span class="c1"># distributed with this work for additional information</span> |
| <span class="c1"># regarding copyright ownership. The ASF licenses this file</span> |
| <span class="c1"># to you under the Apache License, Version 2.0 (the</span> |
| <span class="c1"># "License"); you may not use this file except in compliance</span> |
| <span class="c1"># with the License. You may obtain a copy of the License at</span> |
| <span class="c1">#</span> |
| <span class="c1"># http://www.apache.org/licenses/LICENSE-2.0</span> |
| <span class="c1">#</span> |
| <span class="c1"># Unless required by applicable law or agreed to in writing,</span> |
| <span class="c1"># software distributed under the License is distributed on an</span> |
| <span class="c1"># "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY</span> |
| <span class="c1"># KIND, either express or implied. See the License for the</span> |
| <span class="c1"># specific language governing permissions and limitations</span> |
| <span class="c1"># under the License.</span> |
| |
| <span class="kn">from</span> <span class="nn">airflow.hooks.hive_hooks</span> <span class="kn">import</span> <span class="n">HiveCliHook</span><span class="p">,</span> <span class="n">HiveMetastoreHook</span> |
| <span class="kn">from</span> <span class="nn">airflow.hooks.druid_hook</span> <span class="kn">import</span> <span class="n">DruidHook</span> |
| <span class="kn">from</span> <span class="nn">airflow.models</span> <span class="kn">import</span> <span class="n">BaseOperator</span> |
| <span class="kn">from</span> <span class="nn">airflow.utils.decorators</span> <span class="kn">import</span> <span class="n">apply_defaults</span> |
| |
| <div class="viewcode-block" id="LOAD_CHECK_INTERVAL"><a class="viewcode-back" href="../../../_api/airflow/operators/hive_to_druid/index.html#airflow.operators.hive_to_druid.LOAD_CHECK_INTERVAL">[docs]</a><span class="n">LOAD_CHECK_INTERVAL</span> <span class="o">=</span> <span class="mi">5</span></div> |
| <div class="viewcode-block" id="DEFAULT_TARGET_PARTITION_SIZE"><a class="viewcode-back" href="../../../_api/airflow/operators/hive_to_druid/index.html#airflow.operators.hive_to_druid.DEFAULT_TARGET_PARTITION_SIZE">[docs]</a><span class="n">DEFAULT_TARGET_PARTITION_SIZE</span> <span class="o">=</span> <span class="mi">5000000</span></div> |
| |
| |
| <div class="viewcode-block" id="HiveToDruidTransfer"><a class="viewcode-back" href="../../../_api/airflow/operators/hive_to_druid/index.html#airflow.operators.hive_to_druid.HiveToDruidTransfer">[docs]</a><span class="k">class</span> <span class="nc">HiveToDruidTransfer</span><span class="p">(</span><span class="n">BaseOperator</span><span class="p">):</span> |
| <span class="sd">"""</span> |
<span class="sd"> Moves data from Hive to Druid; note that for now the data is loaded</span>
<span class="sd"> into memory before being pushed to Druid, so this operator should</span>
<span class="sd"> be used for smallish amount of data.</span>
| |
<span class="sd"> :param sql: SQL query to execute against the Hive database. (templated)</span>
| <span class="sd"> :type sql: str</span> |
| <span class="sd"> :param druid_datasource: the datasource you want to ingest into in druid</span> |
| <span class="sd"> :type druid_datasource: str</span> |
| <span class="sd"> :param ts_dim: the timestamp dimension</span> |
| <span class="sd"> :type ts_dim: str</span> |
| <span class="sd"> :param metric_spec: the metrics you want to define for your data</span> |
| <span class="sd"> :type metric_spec: list</span> |
| <span class="sd"> :param hive_cli_conn_id: the hive connection id</span> |
| <span class="sd"> :type hive_cli_conn_id: str</span> |
| <span class="sd"> :param druid_ingest_conn_id: the druid ingest connection id</span> |
| <span class="sd"> :type druid_ingest_conn_id: str</span> |
| <span class="sd"> :param metastore_conn_id: the metastore connection id</span> |
| <span class="sd"> :type metastore_conn_id: str</span> |
| <span class="sd"> :param hadoop_dependency_coordinates: list of coordinates to squeeze</span> |
<span class="sd"> into the ingest json</span>
| <span class="sd"> :type hadoop_dependency_coordinates: list[str]</span> |
| <span class="sd"> :param intervals: list of time intervals that defines segments,</span> |
| <span class="sd"> this is passed as is to the json object. (templated)</span> |
| <span class="sd"> :type intervals: list</span> |
| <span class="sd"> :param hive_tblproperties: additional properties for tblproperties in</span> |
| <span class="sd"> hive for the staging table</span> |
| <span class="sd"> :type hive_tblproperties: dict</span> |
| <span class="sd"> :param job_properties: additional properties for job</span> |
| <span class="sd"> :type job_properties: dict</span> |
| <span class="sd"> """</span> |
| |
| <div class="viewcode-block" id="HiveToDruidTransfer.template_fields"><a class="viewcode-back" href="../../../_api/airflow/operators/hive_to_druid/index.html#airflow.operators.hive_to_druid.HiveToDruidTransfer.template_fields">[docs]</a> <span class="n">template_fields</span> <span class="o">=</span> <span class="p">(</span><span class="s1">'sql'</span><span class="p">,</span> <span class="s1">'intervals'</span><span class="p">)</span></div> |
| <div class="viewcode-block" id="HiveToDruidTransfer.template_ext"><a class="viewcode-back" href="../../../_api/airflow/operators/hive_to_druid/index.html#airflow.operators.hive_to_druid.HiveToDruidTransfer.template_ext">[docs]</a> <span class="n">template_ext</span> <span class="o">=</span> <span class="p">(</span><span class="s1">'.sql'</span><span class="p">,)</span></div> |
| |
| <span class="nd">@apply_defaults</span> |
| <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span> |
| <span class="bp">self</span><span class="p">,</span> |
| <span class="n">sql</span><span class="p">,</span> |
| <span class="n">druid_datasource</span><span class="p">,</span> |
| <span class="n">ts_dim</span><span class="p">,</span> |
| <span class="n">metric_spec</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> |
| <span class="n">hive_cli_conn_id</span><span class="o">=</span><span class="s1">'hive_cli_default'</span><span class="p">,</span> |
| <span class="n">druid_ingest_conn_id</span><span class="o">=</span><span class="s1">'druid_ingest_default'</span><span class="p">,</span> |
| <span class="n">metastore_conn_id</span><span class="o">=</span><span class="s1">'metastore_default'</span><span class="p">,</span> |
| <span class="n">hadoop_dependency_coordinates</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> |
| <span class="n">intervals</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> |
| <span class="n">num_shards</span><span class="o">=-</span><span class="mi">1</span><span class="p">,</span> |
| <span class="n">target_partition_size</span><span class="o">=-</span><span class="mi">1</span><span class="p">,</span> |
| <span class="n">query_granularity</span><span class="o">=</span><span class="s2">"NONE"</span><span class="p">,</span> |
| <span class="n">segment_granularity</span><span class="o">=</span><span class="s2">"DAY"</span><span class="p">,</span> |
| <span class="n">hive_tblproperties</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> |
| <span class="n">job_properties</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> |
| <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span> |
| <span class="nb">super</span><span class="p">(</span><span class="n">HiveToDruidTransfer</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">sql</span> <span class="o">=</span> <span class="n">sql</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">druid_datasource</span> <span class="o">=</span> <span class="n">druid_datasource</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">ts_dim</span> <span class="o">=</span> <span class="n">ts_dim</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">intervals</span> <span class="o">=</span> <span class="n">intervals</span> <span class="ow">or</span> <span class="p">[</span><span class="s1">'{{ ds }}/{{ tomorrow_ds }}'</span><span class="p">]</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">num_shards</span> <span class="o">=</span> <span class="n">num_shards</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">target_partition_size</span> <span class="o">=</span> <span class="n">target_partition_size</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">query_granularity</span> <span class="o">=</span> <span class="n">query_granularity</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">segment_granularity</span> <span class="o">=</span> <span class="n">segment_granularity</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">metric_spec</span> <span class="o">=</span> <span class="n">metric_spec</span> <span class="ow">or</span> <span class="p">[{</span> |
| <span class="s2">"name"</span><span class="p">:</span> <span class="s2">"count"</span><span class="p">,</span> |
| <span class="s2">"type"</span><span class="p">:</span> <span class="s2">"count"</span><span class="p">}]</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">hive_cli_conn_id</span> <span class="o">=</span> <span class="n">hive_cli_conn_id</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">hadoop_dependency_coordinates</span> <span class="o">=</span> <span class="n">hadoop_dependency_coordinates</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">druid_ingest_conn_id</span> <span class="o">=</span> <span class="n">druid_ingest_conn_id</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">metastore_conn_id</span> <span class="o">=</span> <span class="n">metastore_conn_id</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">hive_tblproperties</span> <span class="o">=</span> <span class="n">hive_tblproperties</span> <span class="ow">or</span> <span class="p">{}</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">job_properties</span> <span class="o">=</span> <span class="n">job_properties</span> |
| |
| <div class="viewcode-block" id="HiveToDruidTransfer.execute"><a class="viewcode-back" href="../../../_api/airflow/operators/hive_to_druid/index.html#airflow.operators.hive_to_druid.HiveToDruidTransfer.execute">[docs]</a> <span class="k">def</span> <span class="nf">execute</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">context</span><span class="p">):</span> |
| <span class="n">hive</span> <span class="o">=</span> <span class="n">HiveCliHook</span><span class="p">(</span><span class="n">hive_cli_conn_id</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">hive_cli_conn_id</span><span class="p">)</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">"Extracting data from Hive"</span><span class="p">)</span> |
| <span class="n">hive_table</span> <span class="o">=</span> <span class="s1">'druid.'</span> <span class="o">+</span> <span class="n">context</span><span class="p">[</span><span class="s1">'task_instance_key_str'</span><span class="p">]</span><span class="o">.</span><span class="n">replace</span><span class="p">(</span><span class="s1">'.'</span><span class="p">,</span> <span class="s1">'_'</span><span class="p">)</span> |
| <span class="n">sql</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">sql</span><span class="o">.</span><span class="n">strip</span><span class="p">()</span><span class="o">.</span><span class="n">strip</span><span class="p">(</span><span class="s1">';'</span><span class="p">)</span> |
| <span class="n">tblproperties</span> <span class="o">=</span> <span class="s1">''</span><span class="o">.</span><span class="n">join</span><span class="p">([</span><span class="s2">", '</span><span class="si">{}</span><span class="s2">' = '</span><span class="si">{}</span><span class="s2">'"</span> |
| <span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">k</span><span class="p">,</span> <span class="n">v</span><span class="p">)</span> |
| <span class="k">for</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">hive_tblproperties</span><span class="o">.</span><span class="n">items</span><span class="p">()])</span> |
| <span class="n">hql</span> <span class="o">=</span> <span class="s2">"""</span><span class="se">\</span> |
| <span class="s2"> SET mapred.output.compress=false;</span> |
| <span class="s2"> SET hive.exec.compress.output=false;</span> |
| <span class="s2"> DROP TABLE IF EXISTS </span><span class="si">{hive_table}</span><span class="s2">;</span> |
| <span class="s2"> CREATE TABLE </span><span class="si">{hive_table}</span><span class="s2"></span> |
| <span class="s2"> ROW FORMAT DELIMITED FIELDS TERMINATED BY '</span><span class="se">\t</span><span class="s2">'</span> |
| <span class="s2"> STORED AS TEXTFILE</span> |
| <span class="s2"> TBLPROPERTIES ('serialization.null.format' = ''</span><span class="si">{tblproperties}</span><span class="s2">)</span> |
| <span class="s2"> AS</span> |
| <span class="s2"> </span><span class="si">{sql}</span><span class="s2"></span> |
| <span class="s2"> """</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">hive_table</span><span class="o">=</span><span class="n">hive_table</span><span class="p">,</span> <span class="n">tblproperties</span><span class="o">=</span><span class="n">tblproperties</span><span class="p">,</span> <span class="n">sql</span><span class="o">=</span><span class="n">sql</span><span class="p">)</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">"Running command:</span><span class="se">\n</span><span class="s2"> </span><span class="si">%s</span><span class="s2">"</span><span class="p">,</span> <span class="n">hql</span><span class="p">)</span> |
| <span class="n">hive</span><span class="o">.</span><span class="n">run_cli</span><span class="p">(</span><span class="n">hql</span><span class="p">)</span> |
| |
| <span class="n">m</span> <span class="o">=</span> <span class="n">HiveMetastoreHook</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">metastore_conn_id</span><span class="p">)</span> |
| |
| <span class="c1"># Get the Hive table and extract the columns</span> |
| <span class="n">t</span> <span class="o">=</span> <span class="n">m</span><span class="o">.</span><span class="n">get_table</span><span class="p">(</span><span class="n">hive_table</span><span class="p">)</span> |
| <span class="n">columns</span> <span class="o">=</span> <span class="p">[</span><span class="n">col</span><span class="o">.</span><span class="n">name</span> <span class="k">for</span> <span class="n">col</span> <span class="ow">in</span> <span class="n">t</span><span class="o">.</span><span class="n">sd</span><span class="o">.</span><span class="n">cols</span><span class="p">]</span> |
| |
| <span class="c1"># Get the path on hdfs</span> |
| <span class="n">hdfs_uri</span> <span class="o">=</span> <span class="n">m</span><span class="o">.</span><span class="n">get_table</span><span class="p">(</span><span class="n">hive_table</span><span class="p">)</span><span class="o">.</span><span class="n">sd</span><span class="o">.</span><span class="n">location</span> |
| <span class="n">pos</span> <span class="o">=</span> <span class="n">hdfs_uri</span><span class="o">.</span><span class="n">find</span><span class="p">(</span><span class="s1">'/user'</span><span class="p">)</span> |
| <span class="n">static_path</span> <span class="o">=</span> <span class="n">hdfs_uri</span><span class="p">[</span><span class="n">pos</span><span class="p">:]</span> |
| |
| <span class="n">schema</span><span class="p">,</span> <span class="n">table</span> <span class="o">=</span> <span class="n">hive_table</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="s1">'.'</span><span class="p">)</span> |
| |
| <span class="n">druid</span> <span class="o">=</span> <span class="n">DruidHook</span><span class="p">(</span><span class="n">druid_ingest_conn_id</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">druid_ingest_conn_id</span><span class="p">)</span> |
| |
| <span class="k">try</span><span class="p">:</span> |
| <span class="n">index_spec</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">construct_ingest_query</span><span class="p">(</span> |
| <span class="n">static_path</span><span class="o">=</span><span class="n">static_path</span><span class="p">,</span> |
| <span class="n">columns</span><span class="o">=</span><span class="n">columns</span><span class="p">,</span> |
| <span class="p">)</span> |
| |
| <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">"Inserting rows into Druid, hdfs path: </span><span class="si">%s</span><span class="s2">"</span><span class="p">,</span> <span class="n">static_path</span><span class="p">)</span> |
| |
| <span class="n">druid</span><span class="o">.</span><span class="n">submit_indexing_job</span><span class="p">(</span><span class="n">index_spec</span><span class="p">)</span> |
| |
| <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">"Load seems to have succeeded!"</span><span class="p">)</span> |
| <span class="k">finally</span><span class="p">:</span> |
| <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span> |
| <span class="s2">"Cleaning up by dropping the temp Hive table </span><span class="si">%s</span><span class="s2">"</span><span class="p">,</span> |
| <span class="n">hive_table</span> |
| <span class="p">)</span> |
| <span class="n">hql</span> <span class="o">=</span> <span class="s2">"DROP TABLE IF EXISTS </span><span class="si">{}</span><span class="s2">"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">hive_table</span><span class="p">)</span> |
| <span class="n">hive</span><span class="o">.</span><span class="n">run_cli</span><span class="p">(</span><span class="n">hql</span><span class="p">)</span></div> |
| |
| <div class="viewcode-block" id="HiveToDruidTransfer.construct_ingest_query"><a class="viewcode-back" href="../../../_api/airflow/operators/hive_to_druid/index.html#airflow.operators.hive_to_druid.HiveToDruidTransfer.construct_ingest_query">[docs]</a> <span class="k">def</span> <span class="nf">construct_ingest_query</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">static_path</span><span class="p">,</span> <span class="n">columns</span><span class="p">):</span> |
| <span class="sd">"""</span> |
| <span class="sd"> Builds an ingest query for an HDFS TSV load.</span> |
| |
| <span class="sd"> :param static_path: The path on hdfs where the data is</span> |
| <span class="sd"> :type static_path: str</span> |
| <span class="sd"> :param columns: List of all the columns that are available</span> |
| <span class="sd"> :type columns: list</span> |
| <span class="sd"> """</span> |
| |
| <span class="c1"># backward compatibility for num_shards,</span> |
| <span class="c1"># but target_partition_size is the default setting</span> |
| <span class="c1"># and overwrites the num_shards</span> |
| <span class="n">num_shards</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">num_shards</span> |
| <span class="n">target_partition_size</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">target_partition_size</span> |
| <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">target_partition_size</span> <span class="o">==</span> <span class="o">-</span><span class="mi">1</span><span class="p">:</span> |
| <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">num_shards</span> <span class="o">==</span> <span class="o">-</span><span class="mi">1</span><span class="p">:</span> |
| <span class="n">target_partition_size</span> <span class="o">=</span> <span class="n">DEFAULT_TARGET_PARTITION_SIZE</span> |
| <span class="k">else</span><span class="p">:</span> |
| <span class="n">num_shards</span> <span class="o">=</span> <span class="o">-</span><span class="mi">1</span> |
| |
| <span class="n">metric_names</span> <span class="o">=</span> <span class="p">[</span><span class="n">m</span><span class="p">[</span><span class="s1">'fieldName'</span><span class="p">]</span> <span class="k">for</span> <span class="n">m</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">metric_spec</span> <span class="k">if</span> <span class="n">m</span><span class="p">[</span><span class="s1">'type'</span><span class="p">]</span> <span class="o">!=</span> <span class="s1">'count'</span><span class="p">]</span> |
| |
| <span class="c1"># Take all the columns, which are not the time dimension</span> |
| <span class="c1"># or a metric, as the dimension columns</span> |
| <span class="n">dimensions</span> <span class="o">=</span> <span class="p">[</span><span class="n">c</span> <span class="k">for</span> <span class="n">c</span> <span class="ow">in</span> <span class="n">columns</span> <span class="k">if</span> <span class="n">c</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">metric_names</span> <span class="ow">and</span> <span class="n">c</span> <span class="o">!=</span> <span class="bp">self</span><span class="o">.</span><span class="n">ts_dim</span><span class="p">]</span> |
| |
| <span class="n">ingest_query_dict</span> <span class="o">=</span> <span class="p">{</span> |
| <span class="s2">"type"</span><span class="p">:</span> <span class="s2">"index_hadoop"</span><span class="p">,</span> |
| <span class="s2">"spec"</span><span class="p">:</span> <span class="p">{</span> |
| <span class="s2">"dataSchema"</span><span class="p">:</span> <span class="p">{</span> |
| <span class="s2">"metricsSpec"</span><span class="p">:</span> <span class="bp">self</span><span class="o">.</span><span class="n">metric_spec</span><span class="p">,</span> |
| <span class="s2">"granularitySpec"</span><span class="p">:</span> <span class="p">{</span> |
| <span class="s2">"queryGranularity"</span><span class="p">:</span> <span class="bp">self</span><span class="o">.</span><span class="n">query_granularity</span><span class="p">,</span> |
| <span class="s2">"intervals"</span><span class="p">:</span> <span class="bp">self</span><span class="o">.</span><span class="n">intervals</span><span class="p">,</span> |
| <span class="s2">"type"</span><span class="p">:</span> <span class="s2">"uniform"</span><span class="p">,</span> |
| <span class="s2">"segmentGranularity"</span><span class="p">:</span> <span class="bp">self</span><span class="o">.</span><span class="n">segment_granularity</span><span class="p">,</span> |
| <span class="p">},</span> |
| <span class="s2">"parser"</span><span class="p">:</span> <span class="p">{</span> |
| <span class="s2">"type"</span><span class="p">:</span> <span class="s2">"string"</span><span class="p">,</span> |
| <span class="s2">"parseSpec"</span><span class="p">:</span> <span class="p">{</span> |
| <span class="s2">"columns"</span><span class="p">:</span> <span class="n">columns</span><span class="p">,</span> |
| <span class="s2">"dimensionsSpec"</span><span class="p">:</span> <span class="p">{</span> |
| <span class="s2">"dimensionExclusions"</span><span class="p">:</span> <span class="p">[],</span> |
| <span class="s2">"dimensions"</span><span class="p">:</span> <span class="n">dimensions</span><span class="p">,</span> <span class="c1"># list of names</span> |
| <span class="s2">"spatialDimensions"</span><span class="p">:</span> <span class="p">[]</span> |
| <span class="p">},</span> |
| <span class="s2">"timestampSpec"</span><span class="p">:</span> <span class="p">{</span> |
| <span class="s2">"column"</span><span class="p">:</span> <span class="bp">self</span><span class="o">.</span><span class="n">ts_dim</span><span class="p">,</span> |
| <span class="s2">"format"</span><span class="p">:</span> <span class="s2">"auto"</span> |
| <span class="p">},</span> |
| <span class="s2">"format"</span><span class="p">:</span> <span class="s2">"tsv"</span> |
| <span class="p">}</span> |
| <span class="p">},</span> |
| <span class="s2">"dataSource"</span><span class="p">:</span> <span class="bp">self</span><span class="o">.</span><span class="n">druid_datasource</span> |
| <span class="p">},</span> |
| <span class="s2">"tuningConfig"</span><span class="p">:</span> <span class="p">{</span> |
| <span class="s2">"type"</span><span class="p">:</span> <span class="s2">"hadoop"</span><span class="p">,</span> |
| <span class="s2">"jobProperties"</span><span class="p">:</span> <span class="p">{</span> |
| <span class="s2">"mapreduce.job.user.classpath.first"</span><span class="p">:</span> <span class="s2">"false"</span><span class="p">,</span> |
| <span class="s2">"mapreduce.map.output.compress"</span><span class="p">:</span> <span class="s2">"false"</span><span class="p">,</span> |
| <span class="s2">"mapreduce.output.fileoutputformat.compress"</span><span class="p">:</span> <span class="s2">"false"</span><span class="p">,</span> |
| <span class="p">},</span> |
| <span class="s2">"partitionsSpec"</span><span class="p">:</span> <span class="p">{</span> |
| <span class="s2">"type"</span><span class="p">:</span> <span class="s2">"hashed"</span><span class="p">,</span> |
| <span class="s2">"targetPartitionSize"</span><span class="p">:</span> <span class="n">target_partition_size</span><span class="p">,</span> |
| <span class="s2">"numShards"</span><span class="p">:</span> <span class="n">num_shards</span><span class="p">,</span> |
| <span class="p">},</span> |
| <span class="p">},</span> |
| <span class="s2">"ioConfig"</span><span class="p">:</span> <span class="p">{</span> |
| <span class="s2">"inputSpec"</span><span class="p">:</span> <span class="p">{</span> |
| <span class="s2">"paths"</span><span class="p">:</span> <span class="n">static_path</span><span class="p">,</span> |
| <span class="s2">"type"</span><span class="p">:</span> <span class="s2">"static"</span> |
| <span class="p">},</span> |
| <span class="s2">"type"</span><span class="p">:</span> <span class="s2">"hadoop"</span> |
| <span class="p">}</span> |
| <span class="p">}</span> |
| <span class="p">}</span> |
| |
| <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">job_properties</span><span class="p">:</span> |
| <span class="n">ingest_query_dict</span><span class="p">[</span><span class="s1">'spec'</span><span class="p">][</span><span class="s1">'tuningConfig'</span><span class="p">][</span><span class="s1">'jobProperties'</span><span class="p">]</span> \ |
| <span class="o">.</span><span class="n">update</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">job_properties</span><span class="p">)</span> |
| |
| <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">hadoop_dependency_coordinates</span><span class="p">:</span> |
| <span class="n">ingest_query_dict</span><span class="p">[</span><span class="s1">'hadoopDependencyCoordinates'</span><span class="p">]</span> \ |
| <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">hadoop_dependency_coordinates</span> |
| |
| <span class="k">return</span> <span class="n">ingest_query_dict</span></div></div> |
| </pre></div> |
| |
| </div> |
| |
| </div> |
| |
| |
| <footer> |
| |
| |
| <hr/> |
| |
| <div role="contentinfo"> |
| <p> |
| |
| </p> |
| </div> |
| Built with <a href="http://sphinx-doc.org/">Sphinx</a> using a <a href="https://github.com/rtfd/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>. |
| <div class="footer">This page uses <a href="https://analytics.google.com/"> |
| Google Analytics</a> to collect statistics. You can disable it by blocking |
| the JavaScript coming from www.google-analytics.com. Check our |
| <a href="../../../privacy_notice.html">Privacy Policy</a> |
| for more details. |
| </div> |
| |
| |
| </footer> |
| |
| </div> |
| </div> |
| |
| </section> |
| |
| </div> |
| |
| |
| |
| <script type="text/javascript"> |
| jQuery(function () { |
| SphinxRtdTheme.Navigation.enable(true); |
| }); |
| </script> |
| |
| |
| |
| |
  <!-- Theme Analytics -->
  <script>
  // Standard Google Analytics bootstrap snippet: defines the global `ga`
  // command queue, then asynchronously injects analytics.js by inserting a
  // <script> element before the first script tag on the page.
  // NOTE(review): analytics.js with a UA- property is the legacy Universal
  // Analytics setup — presumably superseded by gtag.js/GA4; confirm whether
  // this property still collects data.
  (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
  (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
  m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
  })(window,document,'script','https://www.google-analytics.com/analytics.js','ga');

  // Register the tracking property and record the initial pageview.
  ga('create', 'UA-140539454-1', 'auto');
  ga('send', 'pageview');
  </script>
| |
| |
| |
| |
| </body> |
| </html> |