<!DOCTYPE html>
<html lang="en" dir=ZgotmplZ>
<head>
<link rel="stylesheet" href="/bootstrap/css/bootstrap.min.css">
<script src="/bootstrap/js/bootstrap.bundle.min.js"></script>
<link rel="stylesheet" type="text/css" href="/font-awesome/css/font-awesome.min.css">
<script src="/js/anchor.min.js"></script>
<script src="/js/flink.js"></script>
<link rel="canonical" href="https://flink.apache.org/2024/03/18/announcing-the-release-of-apache-flink-1.19/">
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="description" content="The Apache Flink PMC is pleased to announce the release of Apache Flink 1.19.0. As usual, we are looking at a packed release with a wide variety of improvements and new features. Overall, 162 people contributed to this release completing 33 FLIPs and 600&#43; issues. Thank you!
Let&rsquo;s dive into the highlights.
Flink SQL Improvements # Custom Parallelism for Table/SQL Sources # Now in Flink 1.19, you can set a custom parallelism for performance tuning via the scan.">
<meta name="theme-color" content="#FFFFFF"><meta property="og:title" content="Announcing the Release of Apache Flink 1.19" />
<meta property="og:description" content="The Apache Flink PMC is pleased to announce the release of Apache Flink 1.19.0. As usual, we are looking at a packed release with a wide variety of improvements and new features. Overall, 162 people contributed to this release completing 33 FLIPs and 600&#43; issues. Thank you!
Let&rsquo;s dive into the highlights.
Flink SQL Improvements # Custom Parallelism for Table/SQL Sources # Now in Flink 1.19, you can set a custom parallelism for performance tuning via the scan." />
<meta property="og:type" content="article" />
<meta property="og:url" content="https://flink.apache.org/2024/03/18/announcing-the-release-of-apache-flink-1.19/" /><meta property="article:section" content="posts" />
<meta property="article:published_time" content="2024-03-18T08:00:00+00:00" />
<meta property="article:modified_time" content="2024-03-18T08:00:00+00:00" />
<title>Announcing the Release of Apache Flink 1.19 | Apache Flink</title>
<link rel="manifest" href="/manifest.json">
<link rel="icon" href="/favicon.png" type="image/x-icon">
<link rel="stylesheet" href="/book.min.22eceb4d17baa9cdc0f57345edd6f215a40474022dfee39b63befb5fb3c596b5.css" integrity="sha256-IuzrTRe6qc3A9XNF7dbyFaQEdAIt/uObY777X7PFlrU=">
<script defer src="/en.search.min.2698f0d1b683dae4d6cb071668b310a55ebcf1c48d11410a015a51d90105b53e.js" integrity="sha256-Jpjw0baD2uTWywcWaLMQpV688cSNEUEKAVpR2QEFtT4="></script>
<!--
Made with Book Theme
https://github.com/alex-shpak/hugo-book
-->
<meta name="generator" content="Hugo 0.124.1">
<script>
var _paq = window._paq = window._paq || [];
_paq.push(['disableCookies']);
_paq.push(["setDomains", ["*.flink.apache.org","*.nightlies.apache.org/flink"]]);
_paq.push(['trackPageView']);
_paq.push(['enableLinkTracking']);
(function() {
var u="//analytics.apache.org/";
_paq.push(['setTrackerUrl', u+'matomo.php']);
_paq.push(['setSiteId', '1']);
var d=document, g=d.createElement('script'), s=d.getElementsByTagName('script')[0];
g.async=true; g.src=u+'matomo.js'; s.parentNode.insertBefore(g,s);
})();
</script>
</head>
<body>
<header>
<nav class="navbar navbar-expand-xl">
<div class="container-fluid">
<a class="navbar-brand" href="/">
<img src="/img/logo/png/100/flink_squirrel_100_color.png" alt="Apache Flink" height="47" width="47" class="d-inline-block align-text-middle">
<span>Apache Flink</span>
</a>
<button class="navbar-toggler" type="button" data-bs-toggle="collapse" data-bs-target="#navbarSupportedContent" aria-controls="navbarSupportedContent" aria-expanded="false" aria-label="Toggle navigation">
<i class="fa fa-bars navbar-toggler-icon"></i>
</button>
<div class="collapse navbar-collapse" id="navbarSupportedContent">
<ul class="navbar-nav">
<li class="nav-item dropdown">
<a class="nav-link dropdown-toggle" href="#" role="button" data-bs-toggle="dropdown" aria-expanded="false">About</a>
<ul class="dropdown-menu">
<li>
<a class="dropdown-item" href="/what-is-flink/flink-architecture/">Architecture</a>
</li>
<li>
<a class="dropdown-item" href="/what-is-flink/flink-applications/">Applications</a>
</li>
<li>
<a class="dropdown-item" href="/what-is-flink/flink-operations/">Operations</a>
</li>
<li>
<a class="dropdown-item" href="/what-is-flink/use-cases/">Use Cases</a>
</li>
<li>
<a class="dropdown-item" href="/what-is-flink/powered-by/">Powered By</a>
</li>
<li>
<a class="dropdown-item" href="/what-is-flink/roadmap/">Roadmap</a>
</li>
<li>
<a class="dropdown-item" href="/what-is-flink/community/">Community & Project Info</a>
</li>
<li>
<a class="dropdown-item" href="/what-is-flink/security/">Security</a>
</li>
<li>
<a class="dropdown-item" href="/what-is-flink/special-thanks/">Special Thanks</a>
</li>
</ul>
</li>
<li class="nav-item dropdown">
<a class="nav-link dropdown-toggle" href="#" role="button" data-bs-toggle="dropdown" aria-expanded="false">Getting Started</a>
<ul class="dropdown-menu">
<li>
<a class="dropdown-item" href="https://nightlies.apache.org/flink/flink-docs-stable/docs/try-flink/local_installation/">With Flink<i class="link fa fa-external-link title" aria-hidden="true"></i>
</a>
</li>
<li>
<a class="dropdown-item" href="https://nightlies.apache.org/flink/flink-kubernetes-operator-docs-stable/docs/try-flink-kubernetes-operator/quick-start/">With Flink Kubernetes Operator<i class="link fa fa-external-link title" aria-hidden="true"></i>
</a>
</li>
<li>
<a class="dropdown-item" href="https://nightlies.apache.org/flink/flink-cdc-docs-stable/docs/get-started/introduction/">With Flink CDC<i class="link fa fa-external-link title" aria-hidden="true"></i>
</a>
</li>
<li>
<a class="dropdown-item" href="https://nightlies.apache.org/flink/flink-ml-docs-stable/docs/try-flink-ml/quick-start/">With Flink ML<i class="link fa fa-external-link title" aria-hidden="true"></i>
</a>
</li>
<li>
<a class="dropdown-item" href="https://nightlies.apache.org/flink/flink-statefun-docs-stable/getting-started/project-setup.html">With Flink Stateful Functions<i class="link fa fa-external-link title" aria-hidden="true"></i>
</a>
</li>
<li>
<a class="dropdown-item" href="https://nightlies.apache.org/flink/flink-docs-stable/docs/learn-flink/overview/">Training Course<i class="link fa fa-external-link title" aria-hidden="true"></i>
</a>
</li>
</ul>
</li>
<li class="nav-item dropdown">
<a class="nav-link dropdown-toggle" href="#" role="button" data-bs-toggle="dropdown" aria-expanded="false">Documentation</a>
<ul class="dropdown-menu">
<li>
<a class="dropdown-item" href="https://nightlies.apache.org/flink/flink-docs-stable/">Flink 1.19 (stable)<i class="link fa fa-external-link title" aria-hidden="true"></i>
</a>
</li>
<li>
<a class="dropdown-item" href="https://nightlies.apache.org/flink/flink-docs-master/">Flink Master (snapshot)<i class="link fa fa-external-link title" aria-hidden="true"></i>
</a>
</li>
<li>
<a class="dropdown-item" href="https://nightlies.apache.org/flink/flink-kubernetes-operator-docs-stable/">Kubernetes Operator 1.8 (latest)<i class="link fa fa-external-link title" aria-hidden="true"></i>
</a>
</li>
<li>
<a class="dropdown-item" href="https://nightlies.apache.org/flink/flink-kubernetes-operator-docs-main">Kubernetes Operator Main (snapshot)<i class="link fa fa-external-link title" aria-hidden="true"></i>
</a>
</li>
<li>
<a class="dropdown-item" href="https://nightlies.apache.org/flink/flink-cdc-docs-stable">CDC 3.0 (stable)<i class="link fa fa-external-link title" aria-hidden="true"></i>
</a>
</li>
<li>
<a class="dropdown-item" href="https://nightlies.apache.org/flink/flink-cdc-docs-master">CDC Master (snapshot)<i class="link fa fa-external-link title" aria-hidden="true"></i>
</a>
</li>
<li>
<a class="dropdown-item" href="https://nightlies.apache.org/flink/flink-ml-docs-stable/">ML 2.3 (stable)<i class="link fa fa-external-link title" aria-hidden="true"></i>
</a>
</li>
<li>
<a class="dropdown-item" href="https://nightlies.apache.org/flink/flink-ml-docs-master">ML Master (snapshot)<i class="link fa fa-external-link title" aria-hidden="true"></i>
</a>
</li>
<li>
<a class="dropdown-item" href="https://nightlies.apache.org/flink/flink-statefun-docs-stable/">Stateful Functions 3.3 (stable)<i class="link fa fa-external-link title" aria-hidden="true"></i>
</a>
</li>
<li>
<a class="dropdown-item" href="https://nightlies.apache.org/flink/flink-statefun-docs-master">Stateful Functions Master (snapshot)<i class="link fa fa-external-link title" aria-hidden="true"></i>
</a>
</li>
</ul>
</li>
<li class="nav-item dropdown">
<a class="nav-link dropdown-toggle" href="#" role="button" data-bs-toggle="dropdown" aria-expanded="false">How to Contribute</a>
<ul class="dropdown-menu">
<li>
<a class="dropdown-item" href="/how-to-contribute/overview/">Overview</a>
</li>
<li>
<a class="dropdown-item" href="/how-to-contribute/contribute-code/">Contribute Code</a>
</li>
<li>
<a class="dropdown-item" href="/how-to-contribute/reviewing-prs/">Review Pull Requests</a>
</li>
<li>
<a class="dropdown-item" href="/how-to-contribute/code-style-and-quality-preamble/">Code Style and Quality Guide</a>
</li>
<li>
<a class="dropdown-item" href="/how-to-contribute/contribute-documentation/">Contribute Documentation</a>
</li>
<li>
<a class="dropdown-item" href="/how-to-contribute/documentation-style-guide/">Documentation Style Guide</a>
</li>
<li>
<a class="dropdown-item" href="/how-to-contribute/improve-website/">Contribute to the Website</a>
</li>
<li>
<a class="dropdown-item" href="/how-to-contribute/getting-help/">Getting Help</a>
</li>
</ul>
</li>
<li class="nav-item">
<a class="nav-link" href="/posts/">Flink Blog</a>
</li>
<li class="nav-item">
<a class="nav-link" href="/downloads/">Downloads</a>
</li>
</ul>
<div class="book-search">
<div class="book-search-spinner hidden">
<i class="fa fa-refresh fa-spin"></i>
</div>
<form class="search-bar d-flex" onsubmit="return false;"su>
<input type="text" id="book-search-input" placeholder="Search" aria-label="Search" maxlength="64" data-hotkeys="s/">
<i class="fa fa-search search"></i>
<i class="fa fa-circle-o-notch fa-spin spinner"></i>
</form>
<div class="book-search-spinner hidden"></div>
<ul id="book-search-results"></ul>
</div>
</div>
</div>
</nav>
<div class="navbar-clearfix"></div>
</header>
<main class="flex">
<section class="container book-page">
<article class="markdown">
<h1>
<a href="/2024/03/18/announcing-the-release-of-apache-flink-1.19/">Announcing the Release of Apache Flink 1.19</a>
</h1>
March 18, 2024 -
Lincoln Lee
<a href="https://twitter.com/lincoln_86xy">(@lincoln_86xy)</a>
<p><p>The Apache Flink PMC is pleased to announce the release of Apache Flink 1.19.0. As usual, we are
looking at a packed release with a wide variety of improvements and new features. Overall, 162
people contributed to this release completing 33 FLIPs and 600+ issues. Thank you!</p>
<p>Let&rsquo;s dive into the highlights.</p>
<h1 id="flink-sql-improvements">
Flink SQL Improvements
<a class="anchor" href="#flink-sql-improvements">#</a>
</h1>
<h2 id="custom-parallelism-for-tablesql-sources">
Custom Parallelism for Table/SQL Sources
<a class="anchor" href="#custom-parallelism-for-tablesql-sources">#</a>
</h2>
<p>Now in Flink 1.19, you can set a custom parallelism for performance tuning via the <code>scan.parallelism</code>
option. The first available connector is DataGen (the Kafka connector is on the way). Here is an example
using the SQL Client:</p>
<div class="highlight"><pre tabindex="0" class="chroma"><code class="language-sql" data-lang="sql"><span class="line"><span class="cl"><span class="c1">-- set parallelism within the ddl
</span></span></span><span class="line"><span class="cl"><span class="c1"></span><span class="k">CREATE</span><span class="w"> </span><span class="k">TABLE</span><span class="w"> </span><span class="n">Orders</span><span class="w"> </span><span class="p">(</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"> </span><span class="n">order_number</span><span class="w"> </span><span class="nb">BIGINT</span><span class="p">,</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"> </span><span class="n">price</span><span class="w"> </span><span class="nb">DECIMAL</span><span class="p">(</span><span class="mi">32</span><span class="p">,</span><span class="mi">2</span><span class="p">),</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"> </span><span class="n">buyer</span><span class="w"> </span><span class="k">ROW</span><span class="o">&lt;</span><span class="n">first_name</span><span class="w"> </span><span class="n">STRING</span><span class="p">,</span><span class="w"> </span><span class="n">last_name</span><span class="w"> </span><span class="n">STRING</span><span class="o">&gt;</span><span class="p">,</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"> </span><span class="n">order_time</span><span class="w"> </span><span class="k">TIMESTAMP</span><span class="p">(</span><span class="mi">3</span><span class="p">)</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"></span><span class="p">)</span><span class="w"> </span><span class="k">WITH</span><span class="w"> </span><span class="p">(</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"> </span><span class="s1">&#39;connector&#39;</span><span class="w"> </span><span class="o">=</span><span class="w"> </span><span class="s1">&#39;datagen&#39;</span><span class="p">,</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"> </span><span class="s1">&#39;scan.parallelism&#39;</span><span class="w"> </span><span class="o">=</span><span class="w"> </span><span class="s1">&#39;4&#39;</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"></span><span class="p">);</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"></span><span class="c1">-- or set parallelism via dynamic table option
</span></span></span><span class="line"><span class="cl"><span class="c1"></span><span class="k">SELECT</span><span class="w"> </span><span class="o">*</span><span class="w"> </span><span class="k">FROM</span><span class="w"> </span><span class="n">Orders</span><span class="w"> </span><span class="cm">/*+ OPTIONS(&#39;scan.parallelism&#39;=&#39;4&#39;) */</span><span class="p">;</span><span class="w">
</span></span></span></code></pre></div><p><strong>More Information</strong></p>
<ul>
<li><a href="https://nightlies.apache.org/flink/flink-docs-release-1.19/docs/dev/table/sourcessinks/#scan-table-source">Documentation</a></li>
<li><a href="https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=263429150">FLIP-367: Support Setting Parallelism for Table/SQL Sources</a></li>
</ul>
<h2 id="configurable-sql-gateway-java-options">
Configurable SQL Gateway Java Options
<a class="anchor" href="#configurable-sql-gateway-java-options">#</a>
</h2>
<p>Flink 1.19 introduces a new option, <code>env.java.opts.sql-gateway</code>, for specifying Java options,
so you can fine-tune the memory settings, garbage collection behavior, and other relevant JVM
parameters for the SQL Gateway.</p>
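<p>For illustration, the option can be set in the Flink configuration file before the SQL Gateway is started; the JVM flags below are placeholders rather than recommendations:</p>
<div class="highlight"><pre tabindex="0" class="chroma"><code class="language-yaml" data-lang="yaml"># Flink configuration file (illustrative values)
env.java.opts.sql-gateway: &#34;-Xmx4g -XX:+UseG1GC&#34;
</code></pre></div>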
<p><strong>More Information</strong></p>
<ul>
<li><a href="https://issues.apache.org/jira/browse/FLINK-33203">FLINK-33203</a></li>
</ul>
<h2 id="configure-different-state-ttls-using-sql-hint">
Configure Different State TTLs Using SQL Hint
<a class="anchor" href="#configure-different-state-ttls-using-sql-hint">#</a>
</h2>
<p>Starting from Flink 1.18, Table API and SQL users can set state time-to-live (TTL) individually for
stateful operators via the SQL compiled plan. In Flink 1.19, users have a more flexible way to
specify custom TTL values for regular joins and group aggregations directly within their queries by <a href="https://nightlies.apache.org/flink/flink-docs-release-1.19/docs/dev/table/sql/queries/hints/#state-ttl-hints">utilizing the STATE_TTL hint</a>.
This improvement means that you no longer need to alter your compiled plan to set specific TTLs for
these frequently used operators. With the introduction of <code>STATE_TTL</code> hints, you can streamline your workflow and
dynamically adjust the TTL based on your operational requirements.</p>
<p>Here is an example:</p>
<div class="highlight"><pre tabindex="0" class="chroma"><code class="language-sql" data-lang="sql"><span class="line"><span class="cl"><span class="c1">-- set state ttl for join
</span></span></span><span class="line"><span class="cl"><span class="c1"></span><span class="k">SELECT</span><span class="w"> </span><span class="cm">/*+ STATE_TTL(&#39;Orders&#39;= &#39;1d&#39;, &#39;Customers&#39; = &#39;20d&#39;) */</span><span class="w"> </span><span class="o">*</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"></span><span class="k">FROM</span><span class="w"> </span><span class="n">Orders</span><span class="w"> </span><span class="k">LEFT</span><span class="w"> </span><span class="k">OUTER</span><span class="w"> </span><span class="k">JOIN</span><span class="w"> </span><span class="n">Customers</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"> </span><span class="k">ON</span><span class="w"> </span><span class="n">Orders</span><span class="p">.</span><span class="n">o_custkey</span><span class="w"> </span><span class="o">=</span><span class="w"> </span><span class="n">Customers</span><span class="p">.</span><span class="n">c_custkey</span><span class="p">;</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"></span><span class="c1">-- set state ttl for aggregation
</span></span></span><span class="line"><span class="cl"><span class="c1"></span><span class="k">SELECT</span><span class="w"> </span><span class="cm">/*+ STATE_TTL(&#39;o&#39; = &#39;1d&#39;) */</span><span class="w"> </span><span class="n">o_orderkey</span><span class="p">,</span><span class="w"> </span><span class="k">SUM</span><span class="p">(</span><span class="n">o_totalprice</span><span class="p">)</span><span class="w"> </span><span class="k">AS</span><span class="w"> </span><span class="n">revenue</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"></span><span class="k">FROM</span><span class="w"> </span><span class="n">Orders</span><span class="w"> </span><span class="k">AS</span><span class="w"> </span><span class="n">o</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"></span><span class="k">GROUP</span><span class="w"> </span><span class="k">BY</span><span class="w"> </span><span class="n">o_orderkey</span><span class="p">;</span><span class="w">
</span></span></span></code></pre></div><p><strong>More Information</strong></p>
<ul>
<li><a href="https://nightlies.apache.org/flink/flink-docs-release-1.19/docs/dev/table/sql/queries/hints/#state-ttl-hints">Documentation</a></li>
<li><a href="https://cwiki.apache.org/confluence/display/FLINK/FLIP-373%3A&#43;Support&#43;Configuring&#43;Different&#43;State&#43;TTLs&#43;using&#43;SQL&#43;Hint">FLIP-373: Support Configuring Different State TTLs using SQL Hint</a></li>
</ul>
<h2 id="named-parameters-for-functions-and-procedures">
Named Parameters for Functions and Procedures
<a class="anchor" href="#named-parameters-for-functions-and-procedures">#</a>
</h2>
<p>Named parameters can now be used when calling a function or stored procedure. With named parameters,
users do not need to strictly follow the parameter positions; they can simply specify each parameter name and its
corresponding value. Optional parameters that are not specified default to null.</p>
<p>Here&rsquo;s an example of defining a function with one mandatory parameter and two optional parameters using named parameters:</p>
<div class="highlight"><pre tabindex="0" class="chroma"><code class="language-java" data-lang="java"><span class="line"><span class="cl"><span class="kd">public</span><span class="w"> </span><span class="kd">static</span><span class="w"> </span><span class="kd">class</span> <span class="nc">NamedArgumentsTableFunction</span><span class="w"> </span><span class="kd">extends</span><span class="w"> </span><span class="n">TableFunction</span><span class="o">&lt;</span><span class="n">Object</span><span class="o">&gt;</span><span class="w"> </span><span class="p">{</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"> </span><span class="nd">@FunctionHint</span><span class="p">(</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"> </span><span class="n">output</span><span class="w"> </span><span class="o">=</span><span class="w"> </span><span class="nd">@DataTypeHint</span><span class="p">(</span><span class="s">&#34;STRING&#34;</span><span class="p">),</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"> </span><span class="n">arguments</span><span class="w"> </span><span class="o">=</span><span class="w"> </span><span class="p">{</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"> </span><span class="nd">@ArgumentHint</span><span class="p">(</span><span class="n">name</span><span class="w"> </span><span class="o">=</span><span class="w"> </span><span class="s">&#34;in1&#34;</span><span class="p">,</span><span class="w"> </span><span class="n">isOptional</span><span class="w"> </span><span class="o">=</span><span class="w"> </span><span class="kc">false</span><span class="p">,</span><span class="w"> </span><span class="n">type</span><span class="w"> </span><span class="o">=</span><span class="w"> </span><span class="nd">@DataTypeHint</span><span class="p">(</span><span class="s">&#34;STRING&#34;</span><span class="p">)),</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"> </span><span class="nd">@ArgumentHint</span><span class="p">(</span><span class="n">name</span><span class="w"> </span><span class="o">=</span><span class="w"> </span><span class="s">&#34;in2&#34;</span><span class="p">,</span><span class="w"> </span><span class="n">isOptional</span><span class="w"> </span><span class="o">=</span><span class="w"> </span><span class="kc">true</span><span class="p">,</span><span class="w"> </span><span class="n">type</span><span class="w"> </span><span class="o">=</span><span class="w"> </span><span class="nd">@DataTypeHint</span><span class="p">(</span><span class="s">&#34;STRING&#34;</span><span class="p">)),</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"> </span><span class="nd">@ArgumentHint</span><span class="p">(</span><span class="n">name</span><span class="w"> </span><span class="o">=</span><span class="w"> </span><span class="s">&#34;in3&#34;</span><span class="p">,</span><span class="w"> </span><span class="n">isOptional</span><span class="w"> </span><span class="o">=</span><span class="w"> </span><span class="kc">true</span><span class="p">,</span><span class="w"> </span><span class="n">type</span><span class="w"> </span><span class="o">=</span><span class="w"> </span><span class="nd">@DataTypeHint</span><span class="p">(</span><span class="s">&#34;STRING&#34;</span><span class="p">))})</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"> </span><span class="kd">public</span><span class="w"> </span><span class="kt">void</span><span class="w"> </span><span class="nf">eval</span><span class="p">(</span><span class="n">String</span><span class="w"> </span><span class="n">arg1</span><span class="p">,</span><span class="w"> </span><span class="n">String</span><span class="w"> </span><span class="n">arg2</span><span class="p">,</span><span class="w"> </span><span class="n">String</span><span class="w"> </span><span class="n">arg3</span><span class="p">)</span><span class="w"> </span><span class="p">{</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"> </span><span class="n">collect</span><span class="p">(</span><span class="n">arg1</span><span class="w"> </span><span class="o">+</span><span class="w"> </span><span class="s">&#34;, &#34;</span><span class="w"> </span><span class="o">+</span><span class="w"> </span><span class="n">arg2</span><span class="w"> </span><span class="o">+</span><span class="w"> </span><span class="s">&#34;,&#34;</span><span class="w"> </span><span class="o">+</span><span class="w"> </span><span class="n">arg3</span><span class="p">);</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"> </span><span class="p">}</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"></span><span class="p">}</span><span class="w">
</span></span></span></code></pre></div><p>When calling the function in SQL, parameters can be specified by name, for example:</p>
<div class="highlight"><pre tabindex="0" class="chroma"><code class="language-sql" data-lang="sql"><span class="line"><span class="cl"><span class="k">SELECT</span><span class="w"> </span><span class="o">*</span><span class="w"> </span><span class="k">FROM</span><span class="w"> </span><span class="k">TABLE</span><span class="p">(</span><span class="n">myFunction</span><span class="p">(</span><span class="n">in1</span><span class="w"> </span><span class="o">=&gt;</span><span class="w"> </span><span class="s1">&#39;v1&#39;</span><span class="p">,</span><span class="w"> </span><span class="n">in3</span><span class="w"> </span><span class="o">=&gt;</span><span class="w"> </span><span class="s1">&#39;v3&#39;</span><span class="p">,</span><span class="w"> </span><span class="n">in2</span><span class="w"> </span><span class="o">=&gt;</span><span class="w"> </span><span class="s1">&#39;v2&#39;</span><span class="p">));</span><span class="w">
</span></span></span></code></pre></div><p>Also the optional parameters can be omitted:</p>
<div class="highlight"><pre tabindex="0" class="chroma"><code class="language-sql" data-lang="sql"><span class="line"><span class="cl"><span class="k">SELECT</span><span class="w"> </span><span class="o">*</span><span class="w"> </span><span class="k">FROM</span><span class="w"> </span><span class="k">TABLE</span><span class="p">(</span><span class="n">myFunction</span><span class="p">(</span><span class="n">in1</span><span class="w"> </span><span class="o">=&gt;</span><span class="w"> </span><span class="s1">&#39;v1&#39;</span><span class="p">));</span><span class="w">
</span></span></span></code></pre></div><p><strong>More Information</strong></p>
<ul>
<li><a href="https://nightlies.apache.org/flink/flink-docs-release-1.19/docs/dev/table/functions/udfs/#named-parameters">Documentation</a></li>
<li><a href="https://cwiki.apache.org/confluence/display/FLINK/FLIP-387%3A&#43;Support&#43;named&#43;parameters&#43;for&#43;functions&#43;and&#43;call&#43;procedures">FLIP-387: Support named parameters for functions and call procedures</a></li>
</ul>
<h2 id="window-tvf-aggregation-features">
Window TVF Aggregation Features
<a class="anchor" href="#window-tvf-aggregation-features">#</a>
</h2>
<ul>
<li>Supports SESSION Window TVF in Streaming Mode<br />
Now users can use SESSION Window TVF in streaming mode. A simple example is as follows:</li>
</ul>
<div class="highlight"><pre tabindex="0" class="chroma"><code class="language-sql" data-lang="sql"><span class="line"><span class="cl"><span class="c1">-- session window with partition keys
</span></span></span><span class="line"><span class="cl"><span class="c1"></span><span class="k">SELECT</span><span class="w"> </span><span class="o">*</span><span class="w"> </span><span class="k">FROM</span><span class="w"> </span><span class="k">TABLE</span><span class="p">(</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"> </span><span class="k">SESSION</span><span class="p">(</span><span class="k">TABLE</span><span class="w"> </span><span class="n">Bid</span><span class="w"> </span><span class="n">PARTITION</span><span class="w"> </span><span class="k">BY</span><span class="w"> </span><span class="n">item</span><span class="p">,</span><span class="w"> </span><span class="k">DESCRIPTOR</span><span class="p">(</span><span class="n">bidtime</span><span class="p">),</span><span class="w"> </span><span class="nb">INTERVAL</span><span class="w"> </span><span class="s1">&#39;5&#39;</span><span class="w"> </span><span class="n">MINUTES</span><span class="p">));</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"></span><span class="c1">-- apply aggregation on the session windowed table with partition keys
</span></span></span><span class="line"><span class="cl"><span class="c1"></span><span class="k">SELECT</span><span class="w"> </span><span class="n">window_start</span><span class="p">,</span><span class="w"> </span><span class="n">window_end</span><span class="p">,</span><span class="w"> </span><span class="n">item</span><span class="p">,</span><span class="w"> </span><span class="k">SUM</span><span class="p">(</span><span class="n">price</span><span class="p">)</span><span class="w"> </span><span class="k">AS</span><span class="w"> </span><span class="n">total_price</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"></span><span class="k">FROM</span><span class="w"> </span><span class="k">TABLE</span><span class="p">(</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"> </span><span class="k">SESSION</span><span class="p">(</span><span class="k">TABLE</span><span class="w"> </span><span class="n">Bid</span><span class="w"> </span><span class="n">PARTITION</span><span class="w"> </span><span class="k">BY</span><span class="w"> </span><span class="n">item</span><span class="p">,</span><span class="w"> </span><span class="k">DESCRIPTOR</span><span class="p">(</span><span class="n">bidtime</span><span class="p">),</span><span class="w"> </span><span class="nb">INTERVAL</span><span class="w"> </span><span class="s1">&#39;5&#39;</span><span class="w"> </span><span class="n">MINUTES</span><span class="p">))</span><span class="w">
</span></span></span><span class="line"><span class="cl"><span class="w"></span><span class="k">GROUP</span><span class="w"> </span><span class="k">BY</span><span class="w"> </span><span class="n">item</span><span class="p">,</span><span class="w"> </span><span class="n">window_start</span><span class="p">,</span><span class="w"> </span><span class="n">window_end</span><span class="p">;</span><span class="w">
</span></span></span></code></pre></div><ul>
<li>Supports Changelog Inputs for Window TVF Aggregation<br />
Window aggregation operators (generated based on the window TVF functions) can now handle changelog
streams (e.g., CDC data sources). Users are encouraged to migrate from the legacy window
aggregation to the new syntax for more complete feature support.</li>
</ul>
<p><strong>More Information</strong></p>
<ul>
<li><a href="https://nightlies.apache.org/flink/flink-docs-release-1.19/docs/dev/table/sql/queries/window-tvf/#session">Documentation</a></li>
</ul>
<h2 id="new-udf-type-asyncscalarfunction">
New UDF Type: AsyncScalarFunction
<a class="anchor" href="#new-udf-type-asyncscalarfunction">#</a>
</h2>
<p>The common UDF type <code>ScalarFunction</code> works well for CPU-intensive operations, but less well for IO-bound
or otherwise long-running computations. Flink 1.19 introduces a new <code>AsyncScalarFunction</code>,
a user-defined asynchronous scalar function that allows function calls to be issued
concurrently and asynchronously.</p>
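<p>For illustration, here is a minimal sketch of such a function. It assumes the FLIP-400 style signature, in which the first <code>eval</code> parameter is a <code>CompletableFuture</code> that the function completes asynchronously; please check the 1.19 UDF documentation for the exact API:</p>
<div class="highlight"><pre tabindex="0" class="chroma"><code class="language-java" data-lang="java">import java.util.concurrent.CompletableFuture;

import org.apache.flink.table.functions.AsyncScalarFunction;

// Minimal sketch (assumed FLIP-400 style signature): the first eval parameter is a
// CompletableFuture that the function completes, so the task thread is not blocked.
public class AsyncStringLength extends AsyncScalarFunction {

    public void eval(CompletableFuture&lt;Integer&gt; result, String input) {
        // Run the (potentially IO-bound) work asynchronously and complete the
        // future once the value is available.
        CompletableFuture.supplyAsync(() -&gt; input == null ? 0 : input.length())
                .whenComplete((value, error) -&gt; {
                    if (error != null) {
                        result.completeExceptionally(error);
                    } else {
                        result.complete(value);
                    }
                });
    }
}
</code></pre></div>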
<p><strong>More Information</strong></p>
<ul>
<li><a href="https://cwiki.apache.org/confluence/display/FLINK/FLIP-400%3A&#43;AsyncScalarFunction&#43;for&#43;asynchronous&#43;scalar&#43;function&#43;support">FLIP-400: AsyncScalarFunction for asynchronous scalar function support</a></li>
</ul>
<h2 id="tuning-minibatch-optimization-for-regular-joins">
Tuning: MiniBatch Optimization for Regular Joins
<a class="anchor" href="#tuning-minibatch-optimization-for-regular-joins">#</a>
</h2>
<p>Record amplification is a pain point when performing cascading joins in Flink. In Flink 1.19,
the new mini-batch optimization can be applied to regular joins to reduce intermediate results in such
cascading join scenarios.</p>
<div style="text-align: center;">
<img src="/img/blog/2024-03-18-release-1.19.0/minibatch_join.png" style="width:90%;margin:15px">
</div>
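<p>The optimization builds on Flink&rsquo;s general mini-batch settings. A minimal sketch for the SQL Client follows; the values are illustrative, and the linked documentation describes the exact options that control mini-batch regular joins:</p>
<div class="highlight"><pre tabindex="0" class="chroma"><code class="language-sql" data-lang="sql">-- enable mini-batch processing in the SQL Client (illustrative values)
SET &#39;table.exec.mini-batch.enabled&#39; = &#39;true&#39;;
SET &#39;table.exec.mini-batch.allow-latency&#39; = &#39;5s&#39;;
SET &#39;table.exec.mini-batch.size&#39; = &#39;5000&#39;;
</code></pre></div>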
<p><strong>More Information</strong></p>
<ul>
<li><a href="https://nightlies.apache.org/flink/flink-docs-release-1.19/docs/dev/table/tuning/#minibatch-regular-joins">minibatch-regular-joins</a></li>
<li><a href="https://cwiki.apache.org/confluence/display/FLINK/FLIP-415%3A&#43;Introduce&#43;a&#43;new&#43;join&#43;operator&#43;to&#43;support&#43;minibatch">FLIP-415: Introduce a new join operator to support minibatch</a></li>
</ul>
<h1 id="runtime--coordination-improvements">
Runtime &amp; Coordination Improvements
<a class="anchor" href="#runtime--coordination-improvements">#</a>
</h1>
<h2 id="dynamic-source-parallelism-inference-for-batch-jobs">
Dynamic Source Parallelism Inference for Batch Jobs
<a class="anchor" href="#dynamic-source-parallelism-inference-for-batch-jobs">#</a>
</h2>
<p>In Flink 1.19, we support dynamic source parallelism inference for batch jobs, which allows
source connectors to dynamically infer their parallelism based on the actual amount of data to consume.
This is a significant improvement over previous versions, which only assigned a fixed default
parallelism to source vertices.
Source connectors need to implement the inference interface to enable dynamic parallelism inference.
Currently, the FileSource connector already provides this functionality.
Additionally, the configuration <code>execution.batch.adaptive.auto-parallelism.default-source-parallelism</code>
is used as the upper bound of source parallelism inference, and it no longer defaults to 1.
Instead, if it is not set, the upper bound of allowed parallelism set via
<code>execution.batch.adaptive.auto-parallelism.max-parallelism</code> is used. If that configuration is
also not set, the default parallelism set via <code>parallelism.default</code> or <code>StreamExecutionEnvironment#setParallelism()</code>
is used instead.</p>
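<p>A minimal configuration sketch of the fallback chain described above (values are illustrative):</p>
<div class="highlight"><pre tabindex="0" class="chroma"><code class="language-yaml" data-lang="yaml"># upper bound for dynamic source parallelism inference
execution.batch.adaptive.auto-parallelism.default-source-parallelism: 32
# fallback upper bound if the option above is not set
execution.batch.adaptive.auto-parallelism.max-parallelism: 128
# final fallback if neither of the options above is set
parallelism.default: 8
</code></pre></div>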
<p><strong>More Information</strong></p>
<ul>
<li><a href="https://nightlies.apache.org/flink/flink-docs-release-1.19/docs/deployment/elastic_scaling/#enable-dynamic-parallelism-inference-support-for-sources">Documentation</a></li>
<li><a href="https://cwiki.apache.org/confluence/display/FLINK/FLIP-379%3A&#43;Dynamic&#43;source&#43;parallelism&#43;inference&#43;for&#43;batch&#43;jobs">FLIP-379: Support dynamic source parallelism inference for batch jobs</a></li>
</ul>
<h2 id="standard-yaml-for-flink-configuration">
Standard YAML for Flink Configuration
<a class="anchor" href="#standard-yaml-for-flink-configuration">#</a>
</h2>
<p>Starting with Flink 1.19, Flink officially provides full support for the standard YAML 1.2
syntax in its configuration file. The default configuration file has been changed to <code>config.yaml</code> and is placed in the <code>conf/</code>
directory. Users should directly modify this file to configure Flink.
If users want to keep using the legacy configuration file <code>flink-conf.yaml</code>, they just need to copy this
file into the <code>conf/</code> directory. Once the legacy configuration file <code>flink-conf.yaml</code> is detected,
Flink will prioritize it as the configuration file. In the upcoming Flink 2.0, the
<code>flink-conf.yaml</code> configuration file will no longer work.</p>
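<p>For illustration, here is a minimal <code>config.yaml</code> written in standard YAML syntax with nested keys; the keys and values are only examples:</p>
<div class="highlight"><pre tabindex="0" class="chroma"><code class="language-yaml" data-lang="yaml"># conf/config.yaml (standard YAML 1.2, illustrative values)
jobmanager:
  rpc:
    address: localhost
  memory:
    process:
      size: 1600m
taskmanager:
  numberOfTaskSlots: 2
  memory:
    process:
      size: 1728m
parallelism:
  default: 1
</code></pre></div>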
<p><strong>More Information</strong></p>
<ul>
<li><a href="https://nightlies.apache.org/flink/flink-docs-release-1.19/docs/deployment/config/#flink-configuration-file">flink-configuration-file</a></li>
<li><a href="https://cwiki.apache.org/confluence/display/FLINK/FLIP-366%3A&#43;Support&#43;standard&#43;YAML&#43;for&#43;FLINK&#43;configuration?src=contextnavpagetreemode">FLIP-366: Support standard YAML for Flink configuration</a></li>
</ul>
<h2 id="profiling-jobmanagertaskmanager-on-flink-web">
Profiling JobManager/TaskManager on Flink Web
<a class="anchor" href="#profiling-jobmanagertaskmanager-on-flink-web">#</a>
</h2>
<p>In Flink 1.19, we support triggering profiling at the JobManager/TaskManager level, allowing users to
create a profiling instance with arbitrary intervals and event modes (supported by <a href="https://github.com/async-profiler/async-profiler">async-profiler</a>).
Users can easily submit profiles and export results in the Flink Web UI.</p>
<p>For example, users can simply submit a profiling instance with a specified period and mode by
&ldquo;Creating a Profiling Instance&rdquo; after identifying a candidate TaskManager/JobManager with a
performance bottleneck:</p>
<div style="text-align: center;">
<img src="/img/blog/2024-03-18-release-1.19.0/profiling.png" style="width:90%;margin:15px">
</div>
<p>then easily download the interactive HTML file after the profiling instance is complete:</p>
<div style="text-align: center;">
<img src="/img/blog/2024-03-18-release-1.19.0/profiling-res.png" style="width:90%;margin:15px">
</div>
<p><strong>More Information</strong></p>
<ul>
<li><a href="https://nightlies.apache.org/flink/flink-docs-release-1.19/docs/ops/debugging/profiler/">Documentation</a></li>
<li><a href="https://cwiki.apache.org/confluence/x/64lEE">FLIP-375: Built-in cross-platform powerful java profiler</a></li>
</ul>
<h2 id="new-config-options-for-administrator-jvm-options">
New Config Options for Administrator JVM Options
<a class="anchor" href="#new-config-options-for-administrator-jvm-options">#</a>
</h2>
<p>A set of administrator JVM options is now available; these options are prepended to the user-set extra JVM options
and enable platform-wide JVM tuning.</p>
<p><strong>More Information</strong></p>
<ul>
<li><a href="https://nightlies.apache.org/flink/flink-docs-release-1.19/docs/deployment/config/#jvm-and-logging-options">Documentation</a></li>
<li><a href="https://cwiki.apache.org/confluence/display/FLINK/FLIP-397%3A&#43;Add&#43;config&#43;options&#43;for&#43;administrator&#43;JVM&#43;options?src=jira">FLIP-397: Add config options for administrator JVM options</a></li>
</ul>
<h2 id="beta-support-for-java-21">
Beta Support for Java 21
<a class="anchor" href="#beta-support-for-java-21">#</a>
</h2>
<p>Apache Flink was made ready to compile and run with Java 21. This feature is still in beta mode.
Issues should be reported in Flink&rsquo;s bug tracker.</p>
<p><strong>More Information</strong></p>
<ul>
<li><a href="https://issues.apache.org/jira/browse/FLINK-33163">FLINK-33163</a></li>
</ul>
<h1 id="checkpoints-improvements">
Checkpoints Improvements
<a class="anchor" href="#checkpoints-improvements">#</a>
</h1>
<h2 id="using-larger-checkpointing-interval-when-source-is-processing-backlog">
Using Larger Checkpointing Interval When Source is Processing Backlog
<a class="anchor" href="#using-larger-checkpointing-interval-when-source-is-processing-backlog">#</a>
</h2>
<p><code>IsProcessingBacklog</code> is introduced to indicate whether a record should be processed with low latency
or high throughput. Connector developers can update their Source implementations to call the
<code>SplitEnumeratorContext#setIsProcessingBacklog</code> method and report whether the current records are backlog records.
If the source is backlog-aware, users can set <code>execution.checkpointing.interval-during-backlog</code> to use a larger
checkpoint interval and enhance throughput while the job is processing the backlog.</p>
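<p>As a sketch, a backlog-aware source can then be combined with a configuration like the following (illustrative values), so that checkpoints are taken less frequently while the backlog is being drained:</p>
<div class="highlight"><pre tabindex="0" class="chroma"><code class="language-yaml" data-lang="yaml"># regular checkpoint interval (illustrative)
execution.checkpointing.interval: 30s
# larger interval used while the source reports that it is processing a backlog
execution.checkpointing.interval-during-backlog: 300s
</code></pre></div>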
<p><strong>More Information</strong></p>
<ul>
<li><a href="https://issues.apache.org/jira/browse/FLINK-32514">FLINK-32514</a></li>
<li><a href="https://cwiki.apache.org/confluence/display/FLINK/FLIP-309%3A&#43;Support&#43;using&#43;larger&#43;checkpointing&#43;interval&#43;when&#43;source&#43;is&#43;processing&#43;backlog">FLIP-309: Support using larger checkpointing interval when source is processing backlog</a></li>
</ul>
<h2 id="checkpointscleaner-clean-individual-checkpoint-states-in-parallel">
CheckpointsCleaner Clean Individual Checkpoint States in Parallel
<a class="anchor" href="#checkpointscleaner-clean-individual-checkpoint-states-in-parallel">#</a>
</h2>
<p>Now when disposing of no-longer-needed checkpoints, every state handle/state file is disposed of
in parallel by the ioExecutor, vastly improving the disposal speed of a single checkpoint (for
large checkpoints, the disposal time can be improved from 10 minutes to less than 1 minute). The old
behavior can be restored by setting <code>state.checkpoint.cleaner.parallel-mode</code> to false.</p>
<p><strong>More Information</strong></p>
<ul>
<li><a href="https://issues.apache.org/jira/browse/FLINK-33090">FLINK-33090</a></li>
</ul>
<h2 id="trigger-checkpoints-through-command-line-client">
Trigger Checkpoints through Command Line Client
<a class="anchor" href="#trigger-checkpoints-through-command-line-client">#</a>
</h2>
<p>The command line interface supports triggering a checkpoint manually. Usage:</p>
<div class="highlight"><pre tabindex="0" class="chroma"><code class="language-shell" data-lang="shell"><span class="line"><span class="cl">./bin/flink checkpoint <span class="nv">$JOB_ID</span> <span class="o">[</span>-full<span class="o">]</span>
</span></span></code></pre></div><p>By specifying the &lsquo;-full&rsquo; option, a full checkpoint is triggered. Otherwise an incremental
checkpoint is triggered if the job is configured to take incremental ones periodically.</p>
<p><strong>More Information</strong></p>
<ul>
<li><a href="https://issues.apache.org/jira/browse/FLINK-6755">FLINK-6755</a></li>
</ul>
<h1 id="connector-api-improvements">
Connector API Improvements
<a class="anchor" href="#connector-api-improvements">#</a>
</h1>
<h2 id="new-interfaces-to-sinkv2-that-are-consistent-with-source-api">
New Interfaces to SinkV2 That Are Consistent with Source API
<a class="anchor" href="#new-interfaces-to-sinkv2-that-are-consistent-with-source-api">#</a>
</h2>
<p>In Flink 1.19, the SinkV2 API has been changed to align with the Source API.<br />
The following interfaces are deprecated: <code>TwoPhaseCommittingSink</code>, <code>StatefulSink</code>, <code>WithPreWriteTopology</code>, <code>WithPreCommitTopology</code>, <code>WithPostCommitTopology</code>.<br />
The following new interfaces have been introduced: <code>CommitterInitContext</code>, <code>CommittingSinkWriter</code>, <code>WriterInitContext</code>, <code>StatefulSinkWriter</code>.<br />
The parameters of the following interface method have been changed: <code>Sink#createWriter</code>.<br />
The original interfaces will remain available during the 1.19 release line, but they will be removed
in subsequent releases.</p>
<p><strong>More Information</strong></p>
<ul>
<li><a href="https://issues.apache.org/jira/browse/FLINK-33973">FLINK-33973</a></li>
<li><a href="https://cwiki.apache.org/confluence/display/FLINK/FLIP-372%3A&#43;Enhance&#43;and&#43;synchronize&#43;Sink&#43;API&#43;to&#43;match&#43;the&#43;Source&#43;API">FLIP-372: Enhance and synchronize Sink API to match the Source API</a></li>
</ul>
<h2 id="new-committer-metrics-to-track-the-status-of-committables">
New Committer Metrics to Track the Status of Committables
<a class="anchor" href="#new-committer-metrics-to-track-the-status-of-committables">#</a>
</h2>
<p>The parameterization of the <code>TwoPhaseCommittingSink#createCommitter</code> method has been changed: a new
<code>CommitterInitContext</code> parameter has been added. The original method will remain available during
the 1.19 release line, but it will be removed in subsequent releases.</p>
<p><strong>More Information</strong></p>
<ul>
<li><a href="https://issues.apache.org/jira/browse/FLINK-25857">FLINK-25857</a></li>
<li><a href="https://cwiki.apache.org/confluence/display/FLINK/FLIP-371%3A&#43;Provide&#43;initialization&#43;context&#43;for&#43;Committer&#43;creation&#43;in&#43;TwoPhaseCommittingSink">FLIP-371: Provide initialization context for Committer creation in TwoPhaseCommittingSink</a></li>
</ul>
<h1 id="important-deprecations">
Important Deprecations
<a class="anchor" href="#important-deprecations">#</a>
</h1>
<p>In preparation for the release of Flink 2.0 later this year, the community has decided to officially
deprecate multiple APIs that have been approaching the end of their life for a while.</p>
<ul>
<li>Flink&rsquo;s <a href="https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/api/common/time/Time.java"><code>org.apache.flink.api.common.time.Time</code></a> is now officially deprecated and will be dropped in Flink 2.0.
Please migrate to Java&rsquo;s own <code>Duration</code> class. Methods that accept a <code>Duration</code> and replace the deprecated <code>Time</code>-based methods have been introduced (see the sketch after this list).</li>
<li><a href="https://github.com/apache/flink/blob/release-1.19/flink-runtime/src/main/java/org/apache/flink/runtime/jobgraph/RestoreMode.java#L40"><code>org.apache.flink.runtime.jobgraph.RestoreMode#LEGACY</code></a> is deprecated. Please use <a href="https://github.com/apache/flink/blob/release-1.19/flink-runtime/src/main/java/org/apache/flink/runtime/jobgraph/RestoreMode.java#L31"><code>RestoreMode#CLAIM</code></a> or <a href="https://github.com/apache/flink/blob/release-1.19/flink-runtime/src/main/java/org/apache/flink/runtime/jobgraph/RestoreMode.java#L34"><code>RestoreMode#NO_CLAIM</code></a> mode instead to get a clear state file ownership when restoring.</li>
<li>The old method of resolving schema compatibility has been deprecated, please migrate to the new method following <a href="https://nightlies.apache.org/flink/flink-docs-release-1.19/docs/dev/datastream/fault-tolerance/serialization/custom_serialization/#migrating-from-deprecated-typeserializersnapshotresolveschemacompatibilityt">Migrating from deprecated <code>TypeSerializerSnapshot#resolveSchemaCompatibility(TypeSerializer newSerializer)</code> before Flink 1.19</a>.</li>
<li>Configuring serialization behavior through hard-coded methods, e.g., <a href="https://github.com/apache/flink/blob/release-1.19/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java#L643"><code>ExecutionConfig#enableForceKryo()</code></a>, is deprecated. Please use the options
<code>pipeline.serialization-config</code>, <code>pipeline.force-avro</code>, <code>pipeline.force-kryo</code>, and <code>pipeline.generic-types</code> instead. Registration of instance-level serializers is deprecated; use class-level serializers instead.</li>
<li>We have deprecated all <code>setXxx</code> and <code>getXxx</code> methods of <code>Configuration</code> except <a href="https://github.com/apache/flink/blob/release-1.19/flink-core/src/main/java/org/apache/flink/configuration/Configuration.java#L176"><code>getString(String key, String defaultValue)</code></a>
and <a href="https://github.com/apache/flink/blob/release-1.19/flink-core/src/main/java/org/apache/flink/configuration/Configuration.java#L220"><code>setString(String key, String value)</code></a>, such as <code>setInteger</code>, <code>setLong</code>, <code>getInteger</code>, and <code>getLong</code>.
Users and developers are recommended to use the get and set methods that take a <code>ConfigOption</code> as the key instead of a string.</li>
<li>The non-<code>ConfigOption</code> objects in the <code>StreamExecutionEnvironment</code>, <code>CheckpointConfig</code>, and <code>ExecutionConfig</code> and their corresponding getter/setter interfaces are now deprecated. These objects and methods are planned to be removed in Flink 2.0. The deprecated interfaces include the getter and setter methods of <code>RestartStrategy</code>, <code>CheckpointStorage</code>, and <code>StateBackend</code>.</li>
<li><a href="https://github.com/apache/flink/blob/release-1.19/flink-core/src/main/java/org/apache/flink/api/common/functions/RuntimeContext.java#L191"><code>org.apache.flink.api.common.functions.RuntimeContext#getExecutionConfig</code></a> is now officially deprecated and will be dropped in Flink 2.0. Please migrate all related usages to the new getter method:<br />
Migrate <a href="https://github.com/apache/flink/blob/release-1.19/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/TypeInformation.java#L201"><code>TypeInformation#createSerializer</code></a> to <a href="https://github.com/apache/flink/blob/release-1.19/flink-core/src/main/java/org/apache/flink/api/common/functions/RuntimeContext.java#L200"><code>RuntimeContext#createTypeSerializer</code></a><br />
Migrate <code>RuntimeContext#getExecutionConfig.getGlobalJobParameters</code> to <a href="https://github.com/apache/flink/blob/release-1.19/flink-core/src/main/java/org/apache/flink/api/common/functions/RuntimeContext.java#L208"><code>RuntimeContext#getGlobalJobParameters</code></a><br />
Migrate <code>RuntimeContext#getExecutionConfig.isObjectReuseEnabled()</code> to <a href="https://github.com/apache/flink/blob/release-1.19/flink-core/src/main/java/org/apache/flink/api/common/functions/RuntimeContext.java#L216"><code>RuntimeContext#isObjectReuseEnabled</code></a></li>
<li><a href="https://github.com/apache/flink/blob/release-1.19/flink-core/src/main/java/org/apache/flink/api/common/functions/RichFunction.java#L76"><code>org.apache.flink.api.common.functions.RichFunction#open(Configuration parameters)</code></a> method has been deprecated and will be removed in future versions.
Users are encouraged to migrate to the new <a href="https://github.com/apache/flink/blob/release-1.19/flink-core/src/main/java/org/apache/flink/api/common/functions/RichFunction.java#L118"><code>RichFunction#open(OpenContext openContext)</code></a>.</li>
<li><a href="https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/AkkaOptions.java"><code>org.apache.flink.configuration.AkkaOptions</code></a> is deprecated and replaced with <a href="https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/RpcOptions.java"><code>RpcOptions</code></a>.</li>
</ul>
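<p>As a small illustration of the first item above, migrating from Flink&rsquo;s deprecated <code>Time</code> to Java&rsquo;s <code>Duration</code> is mostly a one-to-one replacement (sketch only):</p>
<div class="highlight"><pre tabindex="0" class="chroma"><code class="language-java" data-lang="java">import java.time.Duration;

// Sketch: replacing Flink&#39;s deprecated Time with Java&#39;s Duration.
public class DurationMigration {
    public static void main(String[] args) {
        // Before (deprecated): Time timeout = Time.seconds(30);
        // After: pass a java.time.Duration to the Duration-based method overloads.
        Duration timeout = Duration.ofSeconds(30);
        System.out.println(&#34;timeout = &#34; + timeout);
    }
}
</code></pre></div>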
<h1 id="upgrade-notes">
Upgrade Notes
<a class="anchor" href="#upgrade-notes">#</a>
</h1>
<p>The Flink community tries to ensure that upgrades are as seamless as possible.
However, certain changes may require users to make adjustments to certain parts
of the program when upgrading to version 1.19. Please refer to the
<a href="https://nightlies.apache.org/flink/flink-docs-release-1.19/release-notes/flink-1.19/">release notes</a>
for a comprehensive list of adjustments to make and issues to check during the
upgrading process.</p>
<h1 id="list-of-contributors">
List of Contributors
<a class="anchor" href="#list-of-contributors">#</a>
</h1>
<p>The Apache Flink community would like to express gratitude to all the
contributors who made this release possible:</p>
<p>Adi Polak,
Ahmed Hamdy,
Akira Ajisaka,
Alan Sheinberg,
Aleksandr Pilipenko,
Alex Wu,
Alexander Fedulov,
Archit Goyal,
Asha Boyapati,
Benchao Li,
Bo Cui,
Cheena Budhiraja,
Chesnay Schepler,
Dale Lane,
Danny Cranmer,
David Moravek,
Dawid Wysakowicz,
Deepyaman Datta,
Dian Fu,
Dmitriy Linevich,
Elkhan Dadashov,
Eric Brzezenski,
Etienne Chauchot,
Fang Yong,
Feng Jiajie,
Feng Jin,
Ferenc Csaky,
Gabor Somogyi,
Gyula Fora,
Hang Ruan,
Hangxiang Yu,
Hanyu Zheng,
Hjw,
Hong Liang Teoh,
Hongshun Wang,
HuangXingBo,
Jack,
Jacky Lau,
James Hughes,
Jane Chan,
Jerome Gagnon,
Jeyhun Karimov,
Jiabao Sun,
JiangXin,
Jiangjie (Becket) Qin,
Jim Hughes,
Jing Ge,
Jinzhong Li,
JunRuiLee,
Laffery,
Leonard Xu,
Lijie Wang,
Martijn Visser,
Marton Balassi,
Matt Wang,
Matthias Pohl,
Matthias Schwalbe,
Matyas Orhidi,
Maximilian Michels,
Mingliang Liu,
Máté Czagány,
Panagiotis Garefalakis,
ParyshevSergey,
Patrick Lucas,
Peter Huang,
Peter Vary,
Piotr Nowojski,
Prabhu Joseph,
Pranav Sharma,
Qingsheng Ren,
Robin Moffatt,
Roc Marshal,
Rodrigo Meneses,
Roman,
Roman Khachatryan,
Ron,
Rui Fan,
Ruibin Xing,
Ryan Skraba,
Samrat002,
Sergey Nuyanzin,
Shammon FY,
Shengkai,
Stefan Richter,
SuDewei,
TBCCC,
Tartarus0zm,
Thomas Weise,
Timo Walther,
Varun,
Venkata krishnan Sowrirajan,
Vladimir Matveev,
Wang FeiFan,
Weihua Hu,
Weijie Guo,
Wencong Liu,
Xiangyu Feng,
Xianxun Ye,
Xiaogang Zhou,
Xintong Song,
XuShuai,
Xuyang,
Yanfei Lei,
Yangze Guo,
Yi Zhang,
Yu Chen,
Yuan Mei,
Yubin Li,
Yuepeng Pan,
Yun Gao,
Yun Tang,
Yuxin Tan,
Zakelly,
Zhanghao Chen,
Zhu Zhu,
archzi,
bvarghese1,
caicancai,
caodizhou,
dongwoo6kim,
duanyc,
eason.qin,
fengjiajie,
fengli,
gongzhongqiang,
gyang94,
hejufang,
jiangxin,
jiaoqingbo,
jingge,
lijingwei.5018,
lincoln lee,
liuyongvs,
luoyuxia,
mimaomao,
murong00,
polaris6,
pvary,
sharath1709,
simplejason,
sunxia,
sxnan,
tzy123-123,
wangfeifan,
wangzzu,
xiangyu0xf,
xiarui,
xingbo,
xuyang,
yeming,
yhx,
yinhan.yh,
yunfan123,
yunfengzhou-hub,
yunhong,
yuxia Luo,
yuxiang,
zoudan,
周仁祥,
曹帝胄,
朱通通,
马越</p>
</p>
</article>
<div class="edit-this-page">
<p>
<a href="https://cwiki.apache.org/confluence/display/FLINK/Flink+Translation+Specifications">Want to contribute translation?</a>
</p>
<p>
<a href="//github.com/apache/flink-web/edit/asf-site/docs/content/posts/2024-03-18-release-1.19.0.md">
Edit This Page<i class="fa fa-edit fa-fw"></i>
</a>
</p>
</div>
</section>
<aside class="book-toc">
<nav id="TableOfContents"><h3>On This Page <a href="javascript:void(0)" class="toc" onclick="collapseToc()"><i class="fa fa-times" aria-hidden="true"></i></a></h3>
<ul>
<li><a href="#flink-sql-improvements">Flink SQL Improvements</a>
<ul>
<li><a href="#custom-parallelism-for-tablesql-sources">Custom Parallelism for Table/SQL Sources</a></li>
<li><a href="#configurable-sql-gateway-java-options">Configurable SQL Gateway Java Options</a></li>
<li><a href="#configure-different-state-ttls-using-sql-hint">Configure Different State TTLs Using SQL Hint</a></li>
<li><a href="#named-parameters-for-functions-and-procedures">Named Parameters for Functions and Procedures</a></li>
<li><a href="#window-tvf-aggregation-features">Window TVF Aggregation Features</a></li>
<li><a href="#new-udf-type-asyncscalarfunction">New UDF Type: AsyncScalarFunction</a></li>
<li><a href="#tuning-minibatch-optimization-for-regular-joins">Tuning: MiniBatch Optimization for Regular Joins</a></li>
</ul>
</li>
<li><a href="#runtime--coordination-improvements">Runtime &amp; Coordination Improvements</a>
<ul>
<li><a href="#dynamic-source-parallelism-inference-for-batch-jobs">Dynamic Source Parallelism Inference for Batch Jobs</a></li>
<li><a href="#standard-yaml-for-flink-configuration">Standard YAML for Flink Configuration</a></li>
<li><a href="#profiling-jobmanagertaskmanager-on-flink-web">Profiling JobManager/TaskManager on Flink Web</a></li>
<li><a href="#new-config-options-for-administrator-jvm-options">New Config Options for Administrator JVM Options</a></li>
<li><a href="#beta-support-for-java-21">Beta Support for Java 21</a></li>
</ul>
</li>
<li><a href="#checkpoints-improvements">Checkpoints Improvements</a>
<ul>
<li><a href="#using-larger-checkpointing-interval-when-source-is-processing-backlog">Using Larger Checkpointing Interval When Source is Processing Backlog</a></li>
<li><a href="#checkpointscleaner-clean-individual-checkpoint-states-in-parallel">CheckpointsCleaner Clean Individual Checkpoint States in Parallel</a></li>
<li><a href="#trigger-checkpoints-through-command-line-client">Trigger Checkpoints through Command Line Client</a></li>
</ul>
</li>
<li><a href="#connector-api-improvements">Connector API Improvements</a>
<ul>
<li><a href="#new-interfaces-to-sinkv2-that-are-consistent-with-source-api">New Interfaces to SinkV2 That Are Consistent with Source API</a></li>
<li><a href="#new-committer-metrics-to-track-the-status-of-committables">New Committer Metrics to Track the Status of Committables</a></li>
</ul>
</li>
<li><a href="#important-deprecations">Important Deprecations</a></li>
<li><a href="#upgrade-notes">Upgrade Notes</a></li>
<li><a href="#list-of-contributors">List of Contributors</a></li>
</ul>
</nav>
</aside>
<aside class="expand-toc hidden">
<a class="toc" onclick="expandToc()" href="javascript:void(0)">
<i class="fa fa-bars" aria-hidden="true"></i>
</a>
</aside>
</main>
<footer>
<div class="separator"></div>
<div class="panels">
<div class="wrapper">
<div class="panel">
<ul>
<li>
<a href="https://flink-packages.org/">flink-packages.org</a>
</li>
<li>
<a href="https://www.apache.org/">Apache Software Foundation</a>
</li>
<li>
<a href="https://www.apache.org/licenses/">License</a>
</li>
<li>
<a href="/zh/">
<i class="fa fa-globe" aria-hidden="true"></i>&nbsp;中文版
</a>
</li>
</ul>
</div>
<div class="panel">
<ul>
<li>
<a href="/what-is-flink/security">Security</a-->
</li>
<li>
<a href="https://www.apache.org/foundation/sponsorship.html">Donate</a>
</li>
<li>
<a href="https://www.apache.org/foundation/thanks.html">Thanks</a>
</li>
</ul>
</div>
<div class="panel icons">
<div>
<a href="/posts">
<div class="icon flink-blog-icon"></div>
<span>Flink blog</span>
</a>
</div>
<div>
<a href="https://github.com/apache/flink">
<div class="icon flink-github-icon"></div>
<span>Github</span>
</a>
</div>
<div>
<a href="https://twitter.com/apacheflink">
<div class="icon flink-twitter-icon"></div>
<span>Twitter</span>
</a>
</div>
</div>
</div>
</div>
<hr/>
<div class="container disclaimer">
<p>The contents of this website are © 2024 Apache Software Foundation under the terms of the Apache License v2. Apache Flink, Flink, and the Flink logo are either registered trademarks or trademarks of The Apache Software Foundation in the United States and other countries.</p>
</div>
</footer>
</body>
</html>