blob: 13410445f749437f64530caa257a69e0ae34d3a9 [file] [log] [blame]
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Apache Zeppelin 0.9.0 Documentation: Flink Interpreter for Apache Zeppelin</title>
<meta name="description" content="Apache Flink is an open source platform for distributed stream and batch data processing.">
<meta name="author" content="The Apache Software Foundation">
<!-- Enable responsive viewport -->
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<!-- Le HTML5 shim, for IE6-8 support of HTML elements -->
<!--[if lt IE 9]>
<script src="https://html5shim.googlecode.com/svn/trunk/html5.js"></script>
<![endif]-->
<link href="/docs/0.9.0/assets/themes/zeppelin/font-awesome.min.css" rel="stylesheet">
<!-- Le styles -->
<link href="/docs/0.9.0/assets/themes/zeppelin/bootstrap/css/bootstrap.css" rel="stylesheet">
<link href="/docs/0.9.0/assets/themes/zeppelin/css/style.css?body=1" rel="stylesheet" type="text/css">
<link href="/docs/0.9.0/assets/themes/zeppelin/css/syntax.css" rel="stylesheet" type="text/css" media="screen" />
<!-- Le fav and touch icons -->
<!-- Update these with your own images
<link rel="shortcut icon" href="images/favicon.ico">
<link rel="apple-touch-icon" href="images/apple-touch-icon.png">
<link rel="apple-touch-icon" sizes="72x72" href="images/apple-touch-icon-72x72.png">
<link rel="apple-touch-icon" sizes="114x114" href="images/apple-touch-icon-114x114.png">
-->
<!-- Js -->
<script src="/docs/0.9.0/assets/themes/zeppelin/jquery-1.10.2.min.js"></script>
<script src="/docs/0.9.0/assets/themes/zeppelin/bootstrap/js/bootstrap.min.js"></script>
<script src="/docs/0.9.0/assets/themes/zeppelin/js/docs.js"></script>
<script src="/docs/0.9.0/assets/themes/zeppelin/js/anchor.min.js"></script>
<script src="/docs/0.9.0/assets/themes/zeppelin/js/toc.js"></script>
<script src="/docs/0.9.0/assets/themes/zeppelin/js/lunr.min.js"></script>
<script src="/docs/0.9.0/assets/themes/zeppelin/js/search.js"></script>
<!-- atom & rss feed -->
<link href="/docs/0.9.0/atom.xml" type="application/atom+xml" rel="alternate" title="Sitewide ATOM Feed">
<link href="/docs/0.9.0/rss.xml" type="application/rss+xml" rel="alternate" title="Sitewide RSS Feed">
<!-- Matomo -->
<script>
// Matomo (ASF-hosted) page-analytics snippet: queue tracker commands, then load matomo.js asynchronously.
var _paq = window._paq = window._paq || [];
/* tracker methods like "setCustomDimension" should be called before "trackPageView" */
// Privacy: respect the browser Do-Not-Track setting and disable tracking cookies.
_paq.push(["setDoNotTrack", true]);
_paq.push(["disableCookies"]);
_paq.push(['trackPageView']);
_paq.push(['enableLinkTracking']);
(function() {
var u="https://analytics.apache.org/";
_paq.push(['setTrackerUrl', u+'matomo.php']);
_paq.push(['setSiteId', '69']); // site id assigned by analytics.apache.org for these docs
// Insert the async tracker script before the first existing script element on the page.
var d=document, g=d.createElement('script'), s=d.getElementsByTagName('script')[0];
g.async=true; g.src=u+'matomo.js'; s.parentNode.insertBefore(g,s);
})();
</script>
<!-- End Matomo Code -->
</head>
<body>
<div id="menu" class="navbar navbar-inverse navbar-fixed-top" role="navigation">
<div class="container navbar-container">
<div class="navbar-header">
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target=".navbar-collapse">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<div class="navbar-brand">
<a class="navbar-brand-main" href="http://zeppelin.apache.org">
<img src="/docs/0.9.0/assets/themes/zeppelin/img/zeppelin_logo.png" width="50"
style="margin-top: -2px;" alt="Apache Zeppelin logo">
<span style="margin-left: 5px; font-size: 27px;">Zeppelin</span>
</a>
<a class="navbar-brand-version" href="/docs/0.9.0"
style="font-size: 15px; color: white;"> 0.9.0
</a>
</div>
</div>
<nav class="navbar-collapse collapse" role="navigation">
<ul class="nav navbar-nav">
<li>
<a href="#" data-toggle="dropdown" class="dropdown-toggle">Quick Start <b class="caret"></b></a>
<ul class="dropdown-menu">
<li class="title"><span>Getting Started</span></li>
<li><a href="/docs/0.9.0/quickstart/install.html">Install</a></li>
<li><a href="/docs/0.9.0/quickstart/explore_ui.html">Explore UI</a></li>
<li><a href="/docs/0.9.0/quickstart/tutorial.html">Tutorial</a></li>
<li role="separator" class="divider"></li>
<li class="title"><span>Run Mode</span></li>
<li><a href="/docs/0.9.0/quickstart/kubernetes.html">Kubernetes</a></li>
<li><a href="/docs/0.9.0/quickstart/docker.html">Docker</a></li>
<li><a href="/docs/0.9.0/quickstart/yarn.html">Yarn</a></li>
<li role="separator" class="divider"></li>
<li><a href="/docs/0.9.0/quickstart/spark_with_zeppelin.html">Spark with Zeppelin</a></li>
<li><a href="/docs/0.9.0/quickstart/sql_with_zeppelin.html">SQL with Zeppelin</a></li>
<li><a href="/docs/0.9.0/quickstart/python_with_zeppelin.html">Python with Zeppelin</a></li>
</ul>
</li>
<li>
<a href="#" data-toggle="dropdown" class="dropdown-toggle">Usage<b class="caret"></b></a>
<ul class="dropdown-menu scrollable-menu">
<li class="title"><span>Dynamic Form</span></li>
<li><a href="/docs/0.9.0/usage/dynamic_form/intro.html">What is Dynamic Form?</a></li>
<li role="separator" class="divider"></li>
<li class="title"><span>Display System</span></li>
<li><a href="/docs/0.9.0/usage/display_system/basic.html#text">Text Display</a></li>
<li><a href="/docs/0.9.0/usage/display_system/basic.html#html">HTML Display</a></li>
<li><a href="/docs/0.9.0/usage/display_system/basic.html#table">Table Display</a></li>
<li><a href="/docs/0.9.0/usage/display_system/basic.html#network">Network Display</a></li>
<li><a href="/docs/0.9.0/usage/display_system/angular_backend.html">Angular Display using Backend API</a></li>
<li><a href="/docs/0.9.0/usage/display_system/angular_frontend.html">Angular Display using Frontend API</a></li>
<li role="separator" class="divider"></li>
<li class="title"><span>Interpreter</span></li>
<li><a href="/docs/0.9.0/usage/interpreter/overview.html">Overview</a></li>
<li><a href="/docs/0.9.0/usage/interpreter/interpreter_binding_mode.html">Interpreter Binding Mode</a></li>
<li><a href="/docs/0.9.0/usage/interpreter/user_impersonation.html">User Impersonation</a></li>
<li><a href="/docs/0.9.0/usage/interpreter/dependency_management.html">Dependency Management</a></li>
<li><a href="/docs/0.9.0/usage/interpreter/installation.html">Installing Interpreters</a></li>
<!--<li><a href="/docs/0.9.0/usage/interpreter/dynamic_loading.html">Dynamic Interpreter Loading (Experimental)</a></li>-->
<li><a href="/docs/0.9.0/usage/interpreter/execution_hooks.html">Execution Hooks (Experimental)</a></li>
<li role="separator" class="divider"></li>
<li class="title"><span>Other Features</span></li>
<li><a href="/docs/0.9.0/usage/other_features/publishing_paragraphs.html">Publishing Paragraphs</a></li>
<li><a href="/docs/0.9.0/usage/other_features/personalized_mode.html">Personalized Mode</a></li>
<li><a href="/docs/0.9.0/usage/other_features/customizing_homepage.html">Customizing Zeppelin Homepage</a></li>
<li><a href="/docs/0.9.0/usage/other_features/notebook_actions.html">Notebook Actions</a></li>
<li><a href="/docs/0.9.0/usage/other_features/cron_scheduler.html">Cron Scheduler</a></li>
<li><a href="/docs/0.9.0/usage/other_features/zeppelin_context.html">Zeppelin Context</a></li>
<li role="separator" class="divider"></li>
<li class="title"><span>REST API</span></li>
<li><a href="/docs/0.9.0/usage/rest_api/interpreter.html">Interpreter API</a></li>
<li><a href="/docs/0.9.0/usage/rest_api/zeppelin_server.html">Zeppelin Server API</a></li>
<li><a href="/docs/0.9.0/usage/rest_api/notebook.html">Notebook API</a></li>
<li><a href="/docs/0.9.0/usage/rest_api/notebook_repository.html">Notebook Repository API</a></li>
<li><a href="/docs/0.9.0/usage/rest_api/configuration.html">Configuration API</a></li>
<li><a href="/docs/0.9.0/usage/rest_api/credential.html">Credential API</a></li>
<li><a href="/docs/0.9.0/usage/rest_api/helium.html">Helium API</a></li>
<li class="title"><span>Zeppelin SDK</span></li>
<li><a href="/docs/0.9.0/usage/zeppelin_sdk/client_api.html">Client API</a></li>
<li><a href="/docs/0.9.0/usage/zeppelin_sdk/session_api.html">Session API</a></li>
</ul>
</li>
<li>
<a href="#" data-toggle="dropdown" class="dropdown-toggle">Setup<b class="caret"></b></a>
<ul class="dropdown-menu scrollable-menu">
<li class="title"><span>Basics</span></li>
<li><a href="/docs/0.9.0/setup/basics/how_to_build.html">How to Build Zeppelin</a></li>
<li><a href="/docs/0.9.0/setup/basics/hadoop_integration.html">Hadoop Integration</a></li>
<li><a href="/docs/0.9.0/setup/basics/multi_user_support.html">Multi-user Support</a></li>
<li role="separator" class="divider"></li>
<li class="title"><span>Deployment</span></li>
<!--<li><a href="/docs/0.9.0/setup/deployment/docker.html">Docker Image for Zeppelin</a></li>-->
<li><a href="/docs/0.9.0/setup/deployment/spark_cluster_mode.html#spark-standalone-mode">Spark Cluster Mode: Standalone</a></li>
<li><a href="/docs/0.9.0/setup/deployment/spark_cluster_mode.html#spark-on-yarn-mode">Spark Cluster Mode: YARN</a></li>
<li><a href="/docs/0.9.0/setup/deployment/spark_cluster_mode.html#spark-on-mesos-mode">Spark Cluster Mode: Mesos</a></li>
<li><a href="/docs/0.9.0/setup/deployment/flink_and_spark_cluster.html">Zeppelin with Flink, Spark Cluster</a></li>
<li><a href="/docs/0.9.0/setup/deployment/cdh.html">Zeppelin on CDH</a></li>
<li><a href="/docs/0.9.0/setup/deployment/virtual_machine.html">Zeppelin on VM: Vagrant</a></li>
<li role="separator" class="divider"></li>
<li class="title"><span>Security</span></li>
<li><a href="/docs/0.9.0/setup/security/authentication_nginx.html">HTTP Basic Auth using NGINX</a></li>
<li><a href="/docs/0.9.0/setup/security/shiro_authentication.html">Shiro Authentication</a></li>
<li><a href="/docs/0.9.0/setup/security/notebook_authorization.html">Notebook Authorization</a></li>
<li><a href="/docs/0.9.0/setup/security/datasource_authorization.html">Data Source Authorization</a></li>
<li><a href="/docs/0.9.0/setup/security/http_security_headers.html">HTTP Security Headers</a></li>
<li role="separator" class="divider"></li>
<li class="title"><span>Notebook Storage</span></li>
<li><a href="/docs/0.9.0/setup/storage/storage.html#notebook-storage-in-local-git-repository">Git Storage</a></li>
<li><a href="/docs/0.9.0/setup/storage/storage.html#notebook-storage-in-s3">S3 Storage</a></li>
<li><a href="/docs/0.9.0/setup/storage/storage.html#notebook-storage-in-azure">Azure Storage</a></li>
<li><a href="/docs/0.9.0/setup/storage/storage.html#notebook-storage-in-oss">OSS Storage</a></li>
<li><a href="/docs/0.9.0/setup/storage/storage.html#notebook-storage-in-zeppelinhub">ZeppelinHub Storage</a></li>
<li><a href="/docs/0.9.0/setup/storage/storage.html#notebook-storage-in-mongodb">MongoDB Storage</a></li>
<li role="separator" class="divider"></li>
<li class="title"><span>Operation</span></li>
<li><a href="/docs/0.9.0/setup/operation/configuration.html">Configuration</a></li>
<li><a href="/docs/0.9.0/setup/operation/proxy_setting.html">Proxy Setting</a></li>
<li><a href="/docs/0.9.0/setup/operation/upgrading.html">Upgrading</a></li>
<li><a href="/docs/0.9.0/setup/operation/trouble_shooting.html">Trouble Shooting</a></li>
</ul>
</li>
<li>
<a href="#" data-toggle="dropdown" class="dropdown-toggle">Interpreter <b class="caret"></b></a>
<ul class="dropdown-menu scrollable-menu">
<li class="title"><span>Interpreters</span></li>
<li><a href="/docs/0.9.0/usage/interpreter/overview.html">Overview</a></li>
<li role="separator" class="divider"></li>
<li><a href="/docs/0.9.0/interpreter/spark.html">Spark</a></li>
<li><a href="/docs/0.9.0/interpreter/jdbc.html">JDBC</a></li>
<li><a href="/docs/0.9.0/interpreter/python.html">Python</a></li>
<li><a href="/docs/0.9.0/interpreter/r.html">R</a></li>
<li role="separator" class="divider"></li>
<li><a href="/docs/0.9.0/interpreter/alluxio.html">Alluxio</a></li>
<li><a href="/docs/0.9.0/interpreter/beam.html">Beam</a></li>
<li><a href="/docs/0.9.0/interpreter/bigquery.html">BigQuery</a></li>
<li><a href="/docs/0.9.0/interpreter/cassandra.html">Cassandra</a></li>
<li><a href="/docs/0.9.0/interpreter/elasticsearch.html">Elasticsearch</a></li>
<li><a href="/docs/0.9.0/interpreter/flink.html">Flink</a></li>
<li><a href="/docs/0.9.0/interpreter/geode.html">Geode</a></li>
<li><a href="/docs/0.9.0/interpreter/groovy.html">Groovy</a></li>
<li><a href="/docs/0.9.0/interpreter/hazelcastjet.html">Hazelcast Jet</a></li>
<li><a href="/docs/0.9.0/interpreter/hbase.html">HBase</a></li>
<li><a href="/docs/0.9.0/interpreter/hdfs.html">HDFS</a></li>
<li><a href="/docs/0.9.0/interpreter/hive.html">Hive</a></li>
<li><a href="/docs/0.9.0/interpreter/ignite.html">Ignite</a></li>
<li><a href="/docs/0.9.0/interpreter/influxdb.html">InfluxDB</a></li>
<li><a href="/docs/0.9.0/interpreter/java.html">Java</a></li>
<li><a href="/docs/0.9.0/interpreter/jupyter.html">Jupyter</a></li>
<li><a href="/docs/0.9.0/interpreter/kotlin.html">Kotlin</a></li>
<li><a href="/docs/0.9.0/interpreter/ksql.html">KSQL</a></li>
<li><a href="/docs/0.9.0/interpreter/kylin.html">Kylin</a></li>
<li><a href="/docs/0.9.0/interpreter/lens.html">Lens</a></li>
<li><a href="/docs/0.9.0/interpreter/livy.html">Livy</a></li>
<li><a href="/docs/0.9.0/interpreter/mahout.html">Mahout</a></li>
<li><a href="/docs/0.9.0/interpreter/markdown.html">Markdown</a></li>
<li><a href="/docs/0.9.0/interpreter/mongodb.html">MongoDB</a></li>
<li><a href="/docs/0.9.0/interpreter/neo4j.html">Neo4j</a></li>
<li><a href="/docs/0.9.0/interpreter/pig.html">Pig</a></li>
<li><a href="/docs/0.9.0/interpreter/postgresql.html">PostgreSQL, HAWQ</a></li>
<li><a href="/docs/0.9.0/interpreter/sap.html">SAP</a></li>
<li><a href="/docs/0.9.0/interpreter/scalding.html">Scalding</a></li>
<li><a href="/docs/0.9.0/interpreter/scio.html">Scio</a></li>
<li><a href="/docs/0.9.0/interpreter/shell.html">Shell</a></li>
<li><a href="/docs/0.9.0/interpreter/sparql.html">Sparql</a></li>
<li><a href="/docs/0.9.0/interpreter/submarine.html">Submarine</a></li>
</ul>
</li>
<li>
<a href="#" data-toggle="dropdown" class="dropdown-toggle">More<b class="caret"></b></a>
<ul class="dropdown-menu scrollable-menu" style="right: 0; left: auto;">
<li class="title"><span>Extending Zeppelin</span></li>
<li><a href="/docs/0.9.0/development/writing_zeppelin_interpreter.html">Writing Zeppelin Interpreter</a></li>
<li role="separator" class="divider"></li>
<li class="title"><span>Helium (Experimental)</span></li>
<li><a href="/docs/0.9.0/development/helium/overview.html">Overview</a></li>
<li><a href="/docs/0.9.0/development/helium/writing_application.html">Writing Helium Application</a></li>
<li><a href="/docs/0.9.0/development/helium/writing_spell.html">Writing Helium Spell</a></li>
<li><a href="/docs/0.9.0/development/helium/writing_visualization_basic.html">Writing Helium Visualization: Basics</a></li>
<li><a href="/docs/0.9.0/development/helium/writing_visualization_transformation.html">Writing Helium Visualization: Transformation</a></li>
<li role="separator" class="divider"></li>
<li class="title"><span>Contributing to Zeppelin</span></li>
<li><a href="/docs/0.9.0/setup/basics/how_to_build.html">How to Build Zeppelin</a></li>
<li><a href="/docs/0.9.0/development/contribution/useful_developer_tools.html">Useful Developer Tools</a></li>
<li><a href="/docs/0.9.0/development/contribution/how_to_contribute_code.html">How to Contribute (code)</a></li>
<li><a href="/docs/0.9.0/development/contribution/how_to_contribute_website.html">How to Contribute (website)</a></li>
<li role="separator" class="divider"></li>
<li class="title"><span>External Resources</span></li>
<li><a target="_blank" href="https://zeppelin.apache.org/community.html">Mailing List</a></li>
<li><a target="_blank" href="https://cwiki.apache.org/confluence/display/ZEPPELIN/Zeppelin+Home">Apache Zeppelin Wiki</a></li>
<li><a target="_blank" href="http://stackoverflow.com/questions/tagged/apache-zeppelin">Stackoverflow Questions about Zeppelin</a></li>
</ul>
</li>
<li>
<a href="/docs/0.9.0/search.html" class="nav-search-link">
<span class="fa fa-search nav-search-icon"></span>
</a>
</li>
</ul>
</nav><!--/.navbar-collapse -->
</div>
</div>
<div class="content">
<!--<div class="hero-unit Flink Interpreter for Apache Zeppelin">
<h1></h1>
</div>
-->
<div class="row">
<div class="col-md-12">
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<h1>Flink interpreter for Apache Zeppelin</h1>
<div id="toc"></div>
<h2>Overview</h2>
<p><a href="https://flink.apache.org">Apache Flink</a> is an open source platform for distributed stream and batch data processing. Flink’s core is a streaming dataflow engine that provides data distribution, communication, and fault tolerance for distributed computations over data streams. Flink also builds batch processing on top of the streaming engine, overlaying native iteration support, managed memory, and program optimization.</p>
<p>In Zeppelin 0.9, we refactor the Flink interpreter in Zeppelin to support the latest version of Flink. <strong>Only Flink 1.10+ is supported, old versions of flink won&#39;t work.</strong>
Apache Flink is supported in Zeppelin with the Flink interpreter group which consists of the five interpreters listed below.</p>
<table class="table-configuration">
<tr>
<th>Name</th>
<th>Class</th>
<th>Description</th>
</tr>
<tr>
<td>%flink</td>
<td>FlinkInterpreter</td>
<td>Creates ExecutionEnvironment/StreamExecutionEnvironment/BatchTableEnvironment/StreamTableEnvironment and provides a Scala environment</td>
</tr>
<tr>
<td>%flink.pyflink</td>
<td>PyFlinkInterpreter</td>
<td>Provides a python environment</td>
</tr>
<tr>
<td>%flink.ipyflink</td>
<td>IPyFlinkInterpreter</td>
<td>Provides an ipython environment</td>
</tr>
<tr>
<td>%flink.ssql</td>
<td>FlinkStreamSqlInterpreter</td>
<td>Provides a stream sql environment</td>
</tr>
<tr>
<td>%flink.bsql</td>
<td>FlinkBatchSqlInterpreter</td>
<td>Provides a batch sql environment</td>
</tr>
</table>
<h2>Prerequisites</h2>
<ul>
<li>Download Flink 1.10 for scala 2.11 (Only scala-2.11 is supported, scala-2.12 is not supported yet in Zeppelin)</li>
</ul>
<h2>Configuration</h2>
<p>The Flink interpreter can be configured with properties provided by Zeppelin (as shown in the following table).
You can also add and set other flink properties which are not listed in the table. For a list of additional properties, refer to <a href="https://ci.apache.org/projects/flink/flink-docs-master/ops/config.html">Flink Available Properties</a>.</p>
<table class="table-configuration">
<tr>
<th>Property</th>
<th>Default</th>
<th>Description</th>
</tr>
<tr>
<td><code>FLINK_HOME</code></td>
<td></td>
<td>Location of flink installation. It must be specified, otherwise you cannot use flink in Zeppelin</td>
</tr>
<tr>
<td><code>HADOOP_CONF_DIR</code></td>
<td></td>
<td>Location of hadoop conf, this must be set if running in yarn mode</td>
</tr>
<tr>
<td><code>HIVE_CONF_DIR</code></td>
<td></td>
<td>Location of hive conf, this must be set if you want to connect to the hive metastore</td>
</tr>
<tr>
<td>flink.execution.mode</td>
<td>local</td>
<td>Execution mode of flink, e.g. local | yarn | remote</td>
</tr>
<tr>
<td>flink.execution.remote.host</td>
<td></td>
<td>Host name of running JobManager. Only used for remote mode</td>
</tr>
<tr>
<td>flink.execution.remote.port</td>
<td></td>
<td>Port of running JobManager. Only used for remote mode</td>
</tr>
<tr>
<td>flink.jm.memory</td>
<td>1024</td>
<td>Total number of memory(mb) of JobManager</td>
</tr>
<tr>
<td>flink.tm.memory</td>
<td>1024</td>
<td>Total number of memory(mb) of TaskManager</td>
</tr>
<tr>
<td>flink.tm.slot</td>
<td>1</td>
<td>Number of slot per TaskManager</td>
</tr>
<tr>
<td>local.number-taskmanager</td>
<td>4</td>
<td>Total number of TaskManagers in local mode</td>
</tr>
<tr>
<td>flink.yarn.appName</td>
<td>Zeppelin Flink Session</td>
<td>Yarn app name</td>
</tr>
<tr>
<td>flink.yarn.queue</td>
<td>default</td>
<td>queue name of yarn app</td>
</tr>
<tr>
<td>flink.webui.yarn.useProxy</td>
<td>false</td>
<td>Whether to use the yarn proxy url as the flink web url, e.g. http://resource-manager:8088/proxy/application_1583396598068_0004</td>
</tr>
<tr>
<td>flink.webui.yarn.address</td>
<td></td>
<td>Set this value only when your yarn address is mapped to some other address, e.g. some cloud vendor will map <code>http://resource-manager:8088</code> to <code>https://xxx-yarn.yy.cn/gateway/kkk/yarn</code></td>
</tr>
<tr>
<td>flink.udf.jars</td>
<td></td>
<td>Flink udf jars (comma separated), zeppelin will register udf in this jar automatically for user. These udf jars could be either local files or hdfs files if you have hadoop installed. The udf name is the class name.</td>
</tr>
<tr>
<td>flink.udf.jars.packages</td>
<td></td>
<td>Packages (comma separated) that would be searched for the udf defined in <code>flink.udf.jars</code>.</td>
</tr>
<tr>
<td>flink.execution.jars</td>
<td></td>
<td>Additional user jars (comma separated), these jars could be either local files or hdfs files if you have hadoop installed.</td>
</tr>
<tr>
<td>flink.execution.packages</td>
<td></td>
<td>Additional user packages (comma separated), e.g. org.apache.flink:flink-connector-kafka_2.11:1.10.0,org.apache.flink:flink-connector-kafka-base_2.11:1.10.0,org.apache.flink:flink-json:1.10.0</td>
</tr>
<tr>
<td>zeppelin.flink.concurrentBatchSql.max</td>
<td>10</td>
<td>Max concurrent sql of Batch Sql (<code>%flink.bsql</code>)</td>
</tr>
<tr>
<td>zeppelin.flink.concurrentStreamSql.max</td>
<td>10</td>
<td>Max concurrent sql of Stream Sql (<code>%flink.ssql</code>)</td>
</tr>
<tr>
<td>zeppelin.pyflink.python</td>
<td>python</td>
<td>Python binary executable for PyFlink</td>
</tr>
<tr>
<td>table.exec.resource.default-parallelism</td>
<td>1</td>
<td>Default parallelism for flink sql job</td>
</tr>
<tr>
<td>zeppelin.flink.scala.color</td>
<td>true</td>
<td>Whether display scala shell output in colorful format</td>
</tr>
<tr>
<td>zeppelin.flink.enableHive</td>
<td>false</td>
<td>Whether enable hive</td>
</tr>
<tr>
<td>zeppelin.flink.hive.version</td>
<td>2.3.4</td>
<td>Hive version that you would like to connect</td>
</tr>
<tr>
<td>zeppelin.flink.module.enableHive</td>
<td>false</td>
<td>Whether enable hive module, hive udf take precedence over flink udf if hive module is enabled.</td>
</tr>
<tr>
<td>zeppelin.flink.maxResult</td>
<td>1000</td>
<td>max number of row returned by sql interpreter</td>
</tr>
<tr>
<td><code>flink.interpreter.close.shutdown_cluster</code></td>
<td>true</td>
<td>Whether shutdown application when closing interpreter</td>
</tr>
<tr>
<td><code>zeppelin.interpreter.close.cancel_job</code></td>
<td>true</td>
<td>Whether cancel flink job when closing interpreter</td>
</tr>
<tr>
<td><code>zeppelin.flink.job.check_interval</code></td>
<td>1000</td>
<td>Check interval (in milliseconds) to check flink job progress</td>
</tr>
</table>
<h2>StreamExecutionEnvironment, ExecutionEnvironment, StreamTableEnvironment, BatchTableEnvironment</h2>
<p>Zeppelin will create 6 variables as flink scala (<code>%flink</code>) entry point:</p>
<ul>
<li><code>senv</code> (StreamExecutionEnvironment), </li>
<li><code>benv</code> (ExecutionEnvironment)</li>
<li><code>stenv</code> (StreamTableEnvironment for blink planner) </li>
<li><code>btenv</code> (BatchTableEnvironment for blink planner)</li>
<li><code>stenv_2</code> (StreamTableEnvironment for flink planner) </li>
<li><code>btenv_2</code> (BatchTableEnvironment for flink planner)</li>
</ul>
<p>And will create 6 variables as pyflink (<code>%flink.pyflink</code> or <code>%flink.ipyflink</code>) entry point:</p>
<ul>
<li><code>s_env</code> (StreamExecutionEnvironment), </li>
<li><code>b_env</code> (ExecutionEnvironment)</li>
<li><code>st_env</code> (StreamTableEnvironment for blink planner) </li>
<li><code>bt_env</code> (BatchTableEnvironment for blink planner)</li>
<li><code>st_env_2</code> (StreamTableEnvironment for flink planner) </li>
<li><code>bt_env_2</code> (BatchTableEnvironment for flink planner)</li>
</ul>
<h2>Blink/Flink Planner</h2>
<p>There are 2 planners supported by Flink&#39;s table api: <code>flink</code> &amp; <code>blink</code>.</p>
<ul>
<li>If you want to use DataSet api, and convert it to flink table then please use flink planner (<code>btenv_2</code> and <code>stenv_2</code>).</li>
<li>In other cases, we would always recommend you to use <code>blink</code> planner. This is also what flink batch/streaming sql interpreter use (<code>%flink.bsql</code> &amp; <code>%flink.ssql</code>)</li>
</ul>
<p>Check this <a href="https://ci.apache.org/projects/flink/flink-docs-release-1.10/dev/table/common.html#main-differences-between-the-two-planners">page</a> for the difference between flink planner and blink planner.</p>
<h2>Execution mode (Local/Remote/Yarn/Yarn Application)</h2>
<p>Flink in Zeppelin supports 4 execution modes (<code>flink.execution.mode</code>):</p>
<ul>
<li>Local</li>
<li>Remote</li>
<li>Yarn</li>
<li>Yarn Application</li>
</ul>
<h3>Run Flink in Local Mode</h3>
<p>Running Flink in Local mode will start a MiniCluster in local JVM. By default, the local MiniCluster will use port 8081, so make sure this port is available in your machine,
otherwise you can configure <code>rest.port</code> to specify another port. You can also specify <code>local.number-taskmanager</code> and <code>flink.tm.slot</code> to customize the number of TM and number of slots per TM,
because by default it is only 4 TM with 1 Slots which may not be enough for some cases.</p>
<h3>Run Flink in Remote Mode</h3>
<p>Running Flink in remote mode will connect to an existing flink cluster which could be standalone cluster or yarn session cluster. Besides specifying <code>flink.execution.mode</code> to be <code>remote</code>. You also need to specify
<code>flink.execution.remote.host</code> and <code>flink.execution.remote.port</code> to point to flink job manager.</p>
<h3>Run Flink in Yarn Mode</h3>
<p>In order to run flink in Yarn mode, you need to make the following settings:</p>
<ul>
<li>Set <code>flink.execution.mode</code> to <code>yarn</code></li>
<li>Set <code>HADOOP_CONF_DIR</code> in flink&#39;s interpreter setting or <code>zeppelin-env.sh</code>.</li>
<li>Make sure <code>hadoop</code> command is on your PATH. Because internally flink will call command <code>hadoop classpath</code> and load all the hadoop related jars in the flink interpreter process</li>
</ul>
<h3>Run Flink in Yarn Application Mode</h3>
<p>In the above yarn mode, there will be a separate flink interpreter process. This may run out of resources when there&#39;re many interpreter processes.
So it is recommended to use yarn application mode if you are using flink 1.11 or later (yarn application mode is only supported after flink 1.11). In this mode the flink interpreter runs in the JobManager, which is in a yarn container.
In order to run flink in yarn application mode, you need to make the following settings:</p>
<ul>
<li>Set <code>flink.execution.mode</code> to <code>yarn-application</code></li>
<li>Set <code>HADOOP_CONF_DIR</code> in flink&#39;s interpreter setting or <code>zeppelin-env.sh</code>.</li>
<li>Make sure <code>hadoop</code> command is on your PATH. Because internally flink will call command <code>hadoop classpath</code> and load all the hadoop related jars in the flink interpreter process</li>
</ul>
<h2>How to use Hive</h2>
<p>In order to use Hive in Flink, you have to make the following setting.</p>
<ul>
<li>Set <code>zeppelin.flink.enableHive</code> to be true</li>
<li>Set <code>zeppelin.flink.hive.version</code> to be the hive version you are using.</li>
<li>Set <code>HIVE_CONF_DIR</code> to be the location where <code>hive-site.xml</code> is located. Make sure hive metastore is started and you have configured <code>hive.metastore.uris</code> in <code>hive-site.xml</code></li>
<li>Copy the following dependencies to the lib folder of flink installation. 
<ul>
<li>flink-connector-hive_2.11-1.10.0.jar</li>
<li>flink-hadoop-compatibility_2.11-1.10.0.jar</li>
<li>hive-exec-2.x.jar (for hive 1.x, you need to copy hive-exec-1.x.jar, hive-metastore-1.x.jar, libfb303-0.9.2.jar and libthrift-0.9.2.jar)</li>
</ul></li>
</ul>
<h2>Flink Batch SQL</h2>
<p><code>%flink.bsql</code> is used for flink&#39;s batch sql. You can type <code>help</code> to get all the available commands.
It supports all the flink sql, including DML/DDL/DQL.</p>
<ul>
<li>Use <code>insert into</code> statement for batch ETL</li>
<li>Use <code>select</code> statement for batch data analytics </li>
</ul>
<h2>Flink Streaming SQL</h2>
<p><code>%flink.ssql</code> is used for flink&#39;s streaming sql. You can type <code>help</code> to get all the available commands.
It supports all the flink sql, including DML/DDL/DQL.</p>
<ul>
<li>Use <code>insert into</code> statement for streaming ETL</li>
<li>Use <code>select</code> statement for streaming data analytics</li>
</ul>
<h2>Streaming Data Visualization</h2>
<p>Zeppelin supports 3 types of streaming data analytics:</p>
<ul>
<li>Single</li>
<li>Update</li>
<li>Append</li>
</ul>
<h3>type=single</h3>
<p>Single mode is for the case when the result of sql statement is always one row, such as the following example. The output format is HTML,
and you can specify paragraph local property <code>template</code> for the final output content template.
And you can use <code>{i}</code> as placeholder for the ith column of result.</p>
<p><center>
<img src="/docs/0.9.0/assets/themes/zeppelin/img/docs-img/flink_single_mode.gif" alt="Interactive Help">
</center></p>
<h3>type=update</h3>
<p>Update mode is suitable for the case when the output is more than one rows, and always will be updated continuously.
Here’s one example where we use group by.</p>
<p><center>
<img src="/docs/0.9.0/assets/themes/zeppelin/img/docs-img/flink_update_mode.gif" alt="Interactive Help">
</center></p>
<h3>type=append</h3>
<p>Append mode is suitable for the scenario where output data is always appended, e.g. the following example which uses a tumble window.</p>
<p><center>
<img src="/docs/0.9.0/assets/themes/zeppelin/img/docs-img/flink_append_mode.gif" alt="Interactive Help">
</center></p>
<h2>Flink UDF</h2>
<p>You can use Flink scala UDF or Python UDF in sql. UDF for batch and streaming sql is the same. Here&#39;re 2 examples.</p>
<ul>
<li>Scala UDF</li>
</ul>
<div class="highlight"><pre><code class="scala language-scala" data-lang="scala"><span class="o">%</span><span class="n">flink</span>
<span class="k">class</span> <span class="nc">ScalaUpper</span> <span class="k">extends</span> <span class="nc">ScalarFunction</span> <span class="o">{</span>
<span class="k">def</span> <span class="n">eval</span><span class="o">(</span><span class="n">str</span><span class="k">:</span> <span class="kt">String</span><span class="o">)</span> <span class="k">=</span> <span class="n">str</span><span class="o">.</span><span class="n">toUpperCase</span>
<span class="o">}</span>
<span class="n">btenv</span><span class="o">.</span><span class="n">registerFunction</span><span class="o">(</span><span class="s">&quot;scala_upper&quot;</span><span class="o">,</span> <span class="k">new</span> <span class="nc">ScalaUpper</span><span class="o">())</span>
</code></pre></div>
<ul>
<li>Python UDF</li>
</ul>
<div class="highlight"><pre><code class="python language-python" data-lang="python"><span class="o">%</span><span class="n">flink</span><span class="o">.</span><span class="n">pyflink</span>
<span class="k">class</span> <span class="nc">PythonUpper</span><span class="p">(</span><span class="n">ScalarFunction</span><span class="p">):</span>
<span class="k">def</span> <span class="nf">eval</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">s</span><span class="p">):</span>
<span class="k">return</span> <span class="n">s</span><span class="o">.</span><span class="n">upper</span><span class="p">()</span>
<span class="n">bt_env</span><span class="o">.</span><span class="n">register_function</span><span class="p">(</span><span class="s">&quot;python_upper&quot;</span><span class="p">,</span> <span class="n">udf</span><span class="p">(</span><span class="n">PythonUpper</span><span class="p">(),</span> <span class="n">DataTypes</span><span class="o">.</span><span class="n">STRING</span><span class="p">(),</span> <span class="n">DataTypes</span><span class="o">.</span><span class="n">STRING</span><span class="p">()))</span>
</code></pre></div>
<p>Zeppelin only supports Scala and Python for the Flink interpreter. If you want to write a Java UDF, or the UDF is complicated enough that it is not suitable to write in Zeppelin,
then you can write the UDF in an IDE and build a UDF jar.
In Zeppelin you just need to set <code>flink.udf.jars</code> to this jar, and the Flink
interpreter will detect all the UDFs in this jar and register them to the TableEnvironment; the UDF name is the class name.</p>
<h2>PyFlink(%flink.pyflink)</h2>
<p>In order to use PyFlink in Zeppelin, you just need to do the following configuration.</p>
<ul>
<li>Install apache-flink (e.g. <code>pip install apache-flink</code>)</li>
<li>Set <code>zeppelin.pyflink.python</code> to the python executable where apache-flink is installed, in case you have multiple Pythons installed.</li>
<li>Copy flink-python_2.11-1.10.0.jar from the flink opt folder to the flink lib folder</li>
</ul>
<p>And PyFlink will create 6 variables for you:</p>
<ul>
<li><code>s_env</code> (StreamExecutionEnvironment), </li>
<li><code>b_env</code> (ExecutionEnvironment)</li>
<li><code>st_env</code> (StreamTableEnvironment for blink planner) </li>
<li><code>bt_env</code> (BatchTableEnvironment for blink planner)</li>
<li><code>st_env_2</code> (StreamTableEnvironment for flink planner) </li>
<li><code>bt_env_2</code> (BatchTableEnvironment for flink planner)</li>
</ul>
<h3>IPython Support(%flink.ipyflink)</h3>
<p>By default, Zeppelin would use IPython in <code>%flink.pyflink</code> when IPython is available; otherwise it would fall back to the original Python implementation.
For the IPython features, you can refer to the doc <a href="python.html">Python Interpreter</a>.</p>
<h2>ZeppelinContext</h2>
<p>Zeppelin automatically injects <code>ZeppelinContext</code> as variable <code>z</code> in your Scala/Python environment. <code>ZeppelinContext</code> provides some additional functions and utilities.
See <a href="../usage/other_features/zeppelin_context.html">Zeppelin-Context</a> for more details. You can use <code>z</code> to display both flink DataSet and batch/stream table.</p>
<ul>
<li><p>Display DataSet
<center>
<img src="/docs/0.9.0/assets/themes/zeppelin/img/docs-img/flink_z_dataset.png" alt="Interactive Help">
</center></p></li>
<li><p>Display Batch Table
<center>
<img src="/docs/0.9.0/assets/themes/zeppelin/img/docs-img/flink_z_batch_table.png" alt="Interactive Help">
</center></p></li>
<li><p>Display Stream Table
<center>
<img src="/docs/0.9.0/assets/themes/zeppelin/img/docs-img/flink_z_stream_table.gif" alt="Interactive Help">
</center></p></li>
</ul>
<h2>Paragraph local properties</h2>
<p>In the section of <code>Streaming Data Visualization</code>, we demonstrate the different visualization type via paragraph local properties: <code>type</code>.
In this section, we will list and explain all the supported local properties in flink interpreter.</p>
<table class="table-configuration">
<tr>
<th>Property</th>
<th>Default</th>
<th>Description</th>
</tr>
<tr>
<td>type</td>
<td></td>
<td>Used in %flink.ssql to specify the streaming visualization type (single, update, append)</td>
</tr>
<tr>
<td>refreshInterval</td>
<td>3000</td>
<td>Used in <code>%flink.ssql</code> to specify the frontend refresh interval for streaming data visualization.</td>
</tr>
<tr>
<td>template</td>
<td>{0}</td>
<td>Used in <code>%flink.ssql</code> to specify the html template for the <code>single</code> type of streaming data visualization. You can use <code>{i}</code> as a placeholder for the {i}th column of the result.</td>
</tr>
<tr>
<td>parallelism</td>
<td></td>
<td>Used in %flink.ssql &amp; %flink.bsql to specify the flink sql job parallelism</td>
</tr>
<tr>
<td>maxParallelism</td>
<td></td>
<td>Used in %flink.ssql &amp; %flink.bsql to specify the flink sql job max parallelism in case you want to change parallelism later. For more details, refer to <a href="https://ci.apache.org/projects/flink/flink-docs-release-1.10/dev/parallel.html#setting-the-maximum-parallelism">Setting the Maximum Parallelism</a>.</td>
</tr>
<tr>
<td>savepointDir</td>
<td></td>
<td>If you specify it, then when you cancel your flink job in Zeppelin, it would also do savepoint and store state in this directory. And when you resume your job, it would resume from this savepoint.</td>
</tr>
<tr>
<td>execution.savepoint.path</td>
<td></td>
<td>When you resume your job, it would resume from this savepoint path.</td>
</tr>
<tr>
<td>resumeFromSavepoint</td>
<td></td>
<td>Resume flink job from savepoint if you specify savepointDir.</td>
</tr>
<tr>
<td>resumeFromLatestCheckpoint</td>
<td></td>
<td>Resume flink job from latest checkpoint if you enable checkpoint.</td>
</tr>
<tr>
<td>runAsOne</td>
<td>false</td>
<td>All the insert into sql will run in a single flink job if this is true.</td>
</tr>
</table>
<h2>Tutorial Notes</h2>
<p>Zeppelin is shipped with several Flink tutorial notes which may be helpful for you. You can check for more features in the tutorial notes.</p>
</div>
</div>
<hr>
<footer>
<!-- <p>&copy; 2021 The Apache Software Foundation</p>-->
</footer>
</div>
</body>
</html>