<!DOCTYPE html>
<html class="writer-html5" lang="en" data-content_root="./">
<head>
<meta charset="utf-8" /><meta name="viewport" content="width=device-width, initial-scale=1" />
<title>apache_beam.ml.inference package &mdash; Apache Beam 2.67.0 documentation</title>
<link rel="stylesheet" type="text/css" href="_static/pygments.css?v=b86133f3" />
<link rel="stylesheet" type="text/css" href="_static/css/theme.css?v=e59714d7" />
<script src="_static/jquery.js?v=5d32c60e"></script>
<script src="_static/_sphinx_javascript_frameworks_compat.js?v=2cd50e6c"></script>
<script src="_static/documentation_options.js?v=959b4fbe"></script>
<script src="_static/doctools.js?v=9a2dae69"></script>
<script src="_static/sphinx_highlight.js?v=dc90522c"></script>
<script src="_static/js/theme.js"></script>
<link rel="index" title="Index" href="genindex.html" />
<link rel="search" title="Search" href="search.html" />
<link rel="next" title="apache_beam.ml.inference.base module" href="apache_beam.ml.inference.base.html" />
<link rel="prev" title="apache_beam.ml.gcp.visionml module" href="apache_beam.ml.gcp.visionml.html" />
</head>
<body class="wy-body-for-nav">
<div class="wy-grid-for-nav">
<nav data-toggle="wy-nav-shift" class="wy-nav-side">
<div class="wy-side-scroll">
<div class="wy-side-nav-search" >
<a href="index.html" class="icon icon-home">
Apache Beam
</a>
<div role="search">
<form id="rtd-search-form" class="wy-form" action="search.html" method="get">
<input type="text" name="q" placeholder="Search docs" aria-label="Search docs" />
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
</div>
</div><div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="Navigation menu">
<ul class="current">
<li class="toctree-l1"><a class="reference internal" href="apache_beam.coders.html">apache_beam.coders package</a></li>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.dataframe.html">apache_beam.dataframe package</a></li>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.io.html">apache_beam.io package</a></li>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.metrics.html">apache_beam.metrics package</a></li>
<li class="toctree-l1 current"><a class="reference internal" href="apache_beam.ml.html">apache_beam.ml package</a><ul class="current">
<li class="toctree-l2 current"><a class="reference internal" href="apache_beam.ml.html#subpackages">Subpackages</a><ul class="current">
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.anomaly.html">apache_beam.ml.anomaly package</a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.gcp.html">apache_beam.ml.gcp package</a></li>
<li class="toctree-l3 current"><a class="current reference internal" href="#">apache_beam.ml.inference package</a><ul>
<li class="toctree-l4"><a class="reference internal" href="#submodules">Submodules</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.rag.html">apache_beam.ml.rag package</a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.transforms.html">apache_beam.ml.transforms package</a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.ts.html">apache_beam.ml.ts package</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.options.html">apache_beam.options package</a></li>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.portability.html">apache_beam.portability package</a></li>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.runners.html">apache_beam.runners package</a></li>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.testing.html">apache_beam.testing package</a></li>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.transforms.html">apache_beam.transforms package</a></li>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.typehints.html">apache_beam.typehints package</a></li>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.utils.html">apache_beam.utils package</a></li>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.yaml.html">apache_beam.yaml package</a></li>
</ul>
<ul>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.error.html">apache_beam.error module</a></li>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.pipeline.html">apache_beam.pipeline module</a></li>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.pvalue.html">apache_beam.pvalue module</a></li>
</ul>
</div>
</div>
</nav>
<section data-toggle="wy-nav-shift" class="wy-nav-content-wrap"><nav class="wy-nav-top" aria-label="Mobile navigation menu" >
<i data-toggle="wy-nav-top" class="fa fa-bars"></i>
<a href="index.html">Apache Beam</a>
</nav>
<div class="wy-nav-content">
<div class="rst-content">
<div role="navigation" aria-label="Page navigation">
<ul class="wy-breadcrumbs">
<li><a href="index.html" class="icon icon-home" aria-label="Home"></a></li>
<li class="breadcrumb-item"><a href="apache_beam.ml.html">apache_beam.ml package</a></li>
<li class="breadcrumb-item active">apache_beam.ml.inference package</li>
<li class="wy-breadcrumbs-aside">
<a href="_sources/apache_beam.ml.inference.rst.txt" rel="nofollow"> View page source</a>
</li>
</ul>
<hr/>
</div>
<div role="main" class="document" itemscope="itemscope" itemtype="http://schema.org/Article">
<div itemprop="articleBody">
<section id="module-apache_beam.ml.inference">
<span id="apache-beam-ml-inference-package"></span><h1>apache_beam.ml.inference package<a class="headerlink" href="#module-apache_beam.ml.inference" title="Link to this heading"></a></h1>
<p>A package with various modules for running inference and prediction
on models. It includes built-in support for popular frameworks as well
as a base interface for adding handlers for frameworks that are not yet supported.</p>
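<p>To show how these pieces typically fit together, the following minimal sketch wires one of the built-in handlers into the <code class="docutils literal notranslate"><span class="pre">RunInference</span></code> transform. The model path is a placeholder, and <code class="docutils literal notranslate"><span class="pre">SklearnModelHandlerNumpy</span></code> is just one of the handlers documented in the submodules below.</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>import numpy
import apache_beam as beam
from apache_beam.ml.inference.base import RunInference
from apache_beam.ml.inference.sklearn_inference import SklearnModelHandlerNumpy

# Placeholder path: point this at a pickled scikit-learn model you own.
model_handler = SklearnModelHandlerNumpy(model_uri='gs://my-bucket/model.pkl')

with beam.Pipeline() as pipeline:
    _ = (
        pipeline
        | beam.Create([numpy.array([1.0, 2.0]), numpy.array([3.0, 4.0])])
        | RunInference(model_handler)  # yields PredictionResult elements
        | beam.Map(print))
</pre></div>
</div>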
<p>Note: in addition to the frameworks covered by the submodules below, Beam also
provides a supported TensorFlow model handler via the tfx-bsl library. See
<a class="reference external" href="https://beam.apache.org/documentation/ml/about-ml/#tensorflow">https://beam.apache.org/documentation/ml/about-ml/#tensorflow</a>
for more information on using TensorFlow in Beam.</p>
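<p>Where no built-in handler (and no tfx-bsl integration) covers a framework, the base interface can be implemented directly. The sketch below is illustrative only: it uses a hypothetical in-memory "model" in place of a real framework, but the overridden methods are the ones a custom handler would typically provide.</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>from typing import Any, Iterable, Optional, Sequence

from apache_beam.ml.inference.base import ModelHandler, PredictionResult


class DoublingModelHandler(ModelHandler[float, PredictionResult, Any]):
    """Hypothetical handler whose 'model' simply doubles its input."""

    def load_model(self) -&gt; Any:
        # A real handler would load weights from a file or a remote store here.
        return lambda x: 2 * x

    def run_inference(
        self,
        batch: Sequence[float],
        model: Any,
        inference_args: Optional[dict] = None) -&gt; Iterable[PredictionResult]:
        # Pair every input example with the model's output for it.
        return [PredictionResult(example, model(example)) for example in batch]
</pre></div>
</div>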
<section id="submodules">
<h2>Submodules<a class="headerlink" href="#submodules" title="Link to this heading"></a></h2>
<div class="toctree-wrapper compound">
<ul>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.ml.inference.base.html">apache_beam.ml.inference.base module</a><ul>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.PredictionResult"><code class="docutils literal notranslate"><span class="pre">PredictionResult</span></code></a></li>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelMetadata"><code class="docutils literal notranslate"><span class="pre">ModelMetadata</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelMetadata.model_id"><code class="docutils literal notranslate"><span class="pre">ModelMetadata.model_id</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelMetadata.model_name"><code class="docutils literal notranslate"><span class="pre">ModelMetadata.model_name</span></code></a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.RunInferenceDLQ"><code class="docutils literal notranslate"><span class="pre">RunInferenceDLQ</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.RunInferenceDLQ.failed_inferences"><code class="docutils literal notranslate"><span class="pre">RunInferenceDLQ.failed_inferences</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.RunInferenceDLQ.failed_preprocessing"><code class="docutils literal notranslate"><span class="pre">RunInferenceDLQ.failed_preprocessing</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.RunInferenceDLQ.failed_postprocessing"><code class="docutils literal notranslate"><span class="pre">RunInferenceDLQ.failed_postprocessing</span></code></a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.KeyModelPathMapping"><code class="docutils literal notranslate"><span class="pre">KeyModelPathMapping</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.KeyModelPathMapping.keys"><code class="docutils literal notranslate"><span class="pre">KeyModelPathMapping.keys</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.KeyModelPathMapping.update_path"><code class="docutils literal notranslate"><span class="pre">KeyModelPathMapping.update_path</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.KeyModelPathMapping.model_id"><code class="docutils literal notranslate"><span class="pre">KeyModelPathMapping.model_id</span></code></a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelHandler"><code class="docutils literal notranslate"><span class="pre">ModelHandler</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelHandler.load_model"><code class="docutils literal notranslate"><span class="pre">ModelHandler.load_model()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelHandler.run_inference"><code class="docutils literal notranslate"><span class="pre">ModelHandler.run_inference()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelHandler.get_num_bytes"><code class="docutils literal notranslate"><span class="pre">ModelHandler.get_num_bytes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelHandler.get_metrics_namespace"><code class="docutils literal notranslate"><span class="pre">ModelHandler.get_metrics_namespace()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelHandler.get_resource_hints"><code class="docutils literal notranslate"><span class="pre">ModelHandler.get_resource_hints()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelHandler.batch_elements_kwargs"><code class="docutils literal notranslate"><span class="pre">ModelHandler.batch_elements_kwargs()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelHandler.validate_inference_args"><code class="docutils literal notranslate"><span class="pre">ModelHandler.validate_inference_args()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelHandler.update_model_path"><code class="docutils literal notranslate"><span class="pre">ModelHandler.update_model_path()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelHandler.update_model_paths"><code class="docutils literal notranslate"><span class="pre">ModelHandler.update_model_paths()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelHandler.get_preprocess_fns"><code class="docutils literal notranslate"><span class="pre">ModelHandler.get_preprocess_fns()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelHandler.get_postprocess_fns"><code class="docutils literal notranslate"><span class="pre">ModelHandler.get_postprocess_fns()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelHandler.should_skip_batching"><code class="docutils literal notranslate"><span class="pre">ModelHandler.should_skip_batching()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelHandler.set_environment_vars"><code class="docutils literal notranslate"><span class="pre">ModelHandler.set_environment_vars()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelHandler.with_preprocess_fn"><code class="docutils literal notranslate"><span class="pre">ModelHandler.with_preprocess_fn()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelHandler.with_postprocess_fn"><code class="docutils literal notranslate"><span class="pre">ModelHandler.with_postprocess_fn()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelHandler.with_no_batching"><code class="docutils literal notranslate"><span class="pre">ModelHandler.with_no_batching()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelHandler.share_model_across_processes"><code class="docutils literal notranslate"><span class="pre">ModelHandler.share_model_across_processes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelHandler.model_copies"><code class="docutils literal notranslate"><span class="pre">ModelHandler.model_copies()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelHandler.override_metrics"><code class="docutils literal notranslate"><span class="pre">ModelHandler.override_metrics()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelHandler.should_garbage_collect_on_timeout"><code class="docutils literal notranslate"><span class="pre">ModelHandler.should_garbage_collect_on_timeout()</span></code></a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.RemoteModelHandler"><code class="docutils literal notranslate"><span class="pre">RemoteModelHandler</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.RemoteModelHandler.create_client"><code class="docutils literal notranslate"><span class="pre">RemoteModelHandler.create_client()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.RemoteModelHandler.load_model"><code class="docutils literal notranslate"><span class="pre">RemoteModelHandler.load_model()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.RemoteModelHandler.retry_on_exception"><code class="docutils literal notranslate"><span class="pre">RemoteModelHandler.retry_on_exception()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.RemoteModelHandler.run_inference"><code class="docutils literal notranslate"><span class="pre">RemoteModelHandler.run_inference()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.RemoteModelHandler.request"><code class="docutils literal notranslate"><span class="pre">RemoteModelHandler.request()</span></code></a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.KeyModelMapping"><code class="docutils literal notranslate"><span class="pre">KeyModelMapping</span></code></a></li>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.KeyedModelHandler"><code class="docutils literal notranslate"><span class="pre">KeyedModelHandler</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.KeyedModelHandler.load_model"><code class="docutils literal notranslate"><span class="pre">KeyedModelHandler.load_model()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.KeyedModelHandler.run_inference"><code class="docutils literal notranslate"><span class="pre">KeyedModelHandler.run_inference()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.KeyedModelHandler.get_num_bytes"><code class="docutils literal notranslate"><span class="pre">KeyedModelHandler.get_num_bytes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.KeyedModelHandler.get_metrics_namespace"><code class="docutils literal notranslate"><span class="pre">KeyedModelHandler.get_metrics_namespace()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.KeyedModelHandler.get_resource_hints"><code class="docutils literal notranslate"><span class="pre">KeyedModelHandler.get_resource_hints()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.KeyedModelHandler.batch_elements_kwargs"><code class="docutils literal notranslate"><span class="pre">KeyedModelHandler.batch_elements_kwargs()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.KeyedModelHandler.validate_inference_args"><code class="docutils literal notranslate"><span class="pre">KeyedModelHandler.validate_inference_args()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.KeyedModelHandler.update_model_paths"><code class="docutils literal notranslate"><span class="pre">KeyedModelHandler.update_model_paths()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.KeyedModelHandler.update_model_path"><code class="docutils literal notranslate"><span class="pre">KeyedModelHandler.update_model_path()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.KeyedModelHandler.share_model_across_processes"><code class="docutils literal notranslate"><span class="pre">KeyedModelHandler.share_model_across_processes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.KeyedModelHandler.model_copies"><code class="docutils literal notranslate"><span class="pre">KeyedModelHandler.model_copies()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.KeyedModelHandler.override_metrics"><code class="docutils literal notranslate"><span class="pre">KeyedModelHandler.override_metrics()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.KeyedModelHandler.should_garbage_collect_on_timeout"><code class="docutils literal notranslate"><span class="pre">KeyedModelHandler.should_garbage_collect_on_timeout()</span></code></a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.MaybeKeyedModelHandler"><code class="docutils literal notranslate"><span class="pre">MaybeKeyedModelHandler</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.MaybeKeyedModelHandler.load_model"><code class="docutils literal notranslate"><span class="pre">MaybeKeyedModelHandler.load_model()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.MaybeKeyedModelHandler.run_inference"><code class="docutils literal notranslate"><span class="pre">MaybeKeyedModelHandler.run_inference()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.MaybeKeyedModelHandler.get_num_bytes"><code class="docutils literal notranslate"><span class="pre">MaybeKeyedModelHandler.get_num_bytes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.MaybeKeyedModelHandler.get_metrics_namespace"><code class="docutils literal notranslate"><span class="pre">MaybeKeyedModelHandler.get_metrics_namespace()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.MaybeKeyedModelHandler.get_resource_hints"><code class="docutils literal notranslate"><span class="pre">MaybeKeyedModelHandler.get_resource_hints()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.MaybeKeyedModelHandler.batch_elements_kwargs"><code class="docutils literal notranslate"><span class="pre">MaybeKeyedModelHandler.batch_elements_kwargs()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.MaybeKeyedModelHandler.validate_inference_args"><code class="docutils literal notranslate"><span class="pre">MaybeKeyedModelHandler.validate_inference_args()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.MaybeKeyedModelHandler.update_model_path"><code class="docutils literal notranslate"><span class="pre">MaybeKeyedModelHandler.update_model_path()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.MaybeKeyedModelHandler.get_preprocess_fns"><code class="docutils literal notranslate"><span class="pre">MaybeKeyedModelHandler.get_preprocess_fns()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.MaybeKeyedModelHandler.get_postprocess_fns"><code class="docutils literal notranslate"><span class="pre">MaybeKeyedModelHandler.get_postprocess_fns()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.MaybeKeyedModelHandler.should_skip_batching"><code class="docutils literal notranslate"><span class="pre">MaybeKeyedModelHandler.should_skip_batching()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.MaybeKeyedModelHandler.share_model_across_processes"><code class="docutils literal notranslate"><span class="pre">MaybeKeyedModelHandler.share_model_across_processes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.MaybeKeyedModelHandler.model_copies"><code class="docutils literal notranslate"><span class="pre">MaybeKeyedModelHandler.model_copies()</span></code></a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.RunInference"><code class="docutils literal notranslate"><span class="pre">RunInference</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.RunInference.annotations"><code class="docutils literal notranslate"><span class="pre">RunInference.annotations()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.RunInference.from_callable"><code class="docutils literal notranslate"><span class="pre">RunInference.from_callable()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.RunInference.expand"><code class="docutils literal notranslate"><span class="pre">RunInference.expand()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.RunInference.with_exception_handling"><code class="docutils literal notranslate"><span class="pre">RunInference.with_exception_handling()</span></code></a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.load_model_status"><code class="docutils literal notranslate"><span class="pre">load_model_status()</span></code></a></li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.ml.inference.gemini_inference.html">apache_beam.ml.inference.gemini_inference module</a><ul>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.gemini_inference.html#apache_beam.ml.inference.gemini_inference.generate_from_string"><code class="docutils literal notranslate"><span class="pre">generate_from_string()</span></code></a></li>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.gemini_inference.html#apache_beam.ml.inference.gemini_inference.GeminiModelHandler"><code class="docutils literal notranslate"><span class="pre">GeminiModelHandler</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.gemini_inference.html#apache_beam.ml.inference.gemini_inference.GeminiModelHandler.create_client"><code class="docutils literal notranslate"><span class="pre">GeminiModelHandler.create_client()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.gemini_inference.html#apache_beam.ml.inference.gemini_inference.GeminiModelHandler.request"><code class="docutils literal notranslate"><span class="pre">GeminiModelHandler.request()</span></code></a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html">apache_beam.ml.inference.huggingface_inference module</a><ul>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFaceModelHandlerTensor"><code class="docutils literal notranslate"><span class="pre">HuggingFaceModelHandlerTensor</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFaceModelHandlerTensor.load_model"><code class="docutils literal notranslate"><span class="pre">HuggingFaceModelHandlerTensor.load_model()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFaceModelHandlerTensor.run_inference"><code class="docutils literal notranslate"><span class="pre">HuggingFaceModelHandlerTensor.run_inference()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFaceModelHandlerTensor.update_model_path"><code class="docutils literal notranslate"><span class="pre">HuggingFaceModelHandlerTensor.update_model_path()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFaceModelHandlerTensor.get_num_bytes"><code class="docutils literal notranslate"><span class="pre">HuggingFaceModelHandlerTensor.get_num_bytes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFaceModelHandlerTensor.batch_elements_kwargs"><code class="docutils literal notranslate"><span class="pre">HuggingFaceModelHandlerTensor.batch_elements_kwargs()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFaceModelHandlerTensor.share_model_across_processes"><code class="docutils literal notranslate"><span class="pre">HuggingFaceModelHandlerTensor.share_model_across_processes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFaceModelHandlerTensor.model_copies"><code class="docutils literal notranslate"><span class="pre">HuggingFaceModelHandlerTensor.model_copies()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFaceModelHandlerTensor.get_metrics_namespace"><code class="docutils literal notranslate"><span class="pre">HuggingFaceModelHandlerTensor.get_metrics_namespace()</span></code></a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFaceModelHandlerKeyedTensor"><code class="docutils literal notranslate"><span class="pre">HuggingFaceModelHandlerKeyedTensor</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFaceModelHandlerKeyedTensor.load_model"><code class="docutils literal notranslate"><span class="pre">HuggingFaceModelHandlerKeyedTensor.load_model()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFaceModelHandlerKeyedTensor.run_inference"><code class="docutils literal notranslate"><span class="pre">HuggingFaceModelHandlerKeyedTensor.run_inference()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFaceModelHandlerKeyedTensor.update_model_path"><code class="docutils literal notranslate"><span class="pre">HuggingFaceModelHandlerKeyedTensor.update_model_path()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFaceModelHandlerKeyedTensor.get_num_bytes"><code class="docutils literal notranslate"><span class="pre">HuggingFaceModelHandlerKeyedTensor.get_num_bytes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFaceModelHandlerKeyedTensor.batch_elements_kwargs"><code class="docutils literal notranslate"><span class="pre">HuggingFaceModelHandlerKeyedTensor.batch_elements_kwargs()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFaceModelHandlerKeyedTensor.share_model_across_processes"><code class="docutils literal notranslate"><span class="pre">HuggingFaceModelHandlerKeyedTensor.share_model_across_processes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFaceModelHandlerKeyedTensor.model_copies"><code class="docutils literal notranslate"><span class="pre">HuggingFaceModelHandlerKeyedTensor.model_copies()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFaceModelHandlerKeyedTensor.get_metrics_namespace"><code class="docutils literal notranslate"><span class="pre">HuggingFaceModelHandlerKeyedTensor.get_metrics_namespace()</span></code></a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFacePipelineModelHandler"><code class="docutils literal notranslate"><span class="pre">HuggingFacePipelineModelHandler</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFacePipelineModelHandler.load_model"><code class="docutils literal notranslate"><span class="pre">HuggingFacePipelineModelHandler.load_model()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFacePipelineModelHandler.run_inference"><code class="docutils literal notranslate"><span class="pre">HuggingFacePipelineModelHandler.run_inference()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFacePipelineModelHandler.update_model_path"><code class="docutils literal notranslate"><span class="pre">HuggingFacePipelineModelHandler.update_model_path()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFacePipelineModelHandler.get_num_bytes"><code class="docutils literal notranslate"><span class="pre">HuggingFacePipelineModelHandler.get_num_bytes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFacePipelineModelHandler.batch_elements_kwargs"><code class="docutils literal notranslate"><span class="pre">HuggingFacePipelineModelHandler.batch_elements_kwargs()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFacePipelineModelHandler.share_model_across_processes"><code class="docutils literal notranslate"><span class="pre">HuggingFacePipelineModelHandler.share_model_across_processes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFacePipelineModelHandler.model_copies"><code class="docutils literal notranslate"><span class="pre">HuggingFacePipelineModelHandler.model_copies()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.huggingface_inference.html#apache_beam.ml.inference.huggingface_inference.HuggingFacePipelineModelHandler.get_metrics_namespace"><code class="docutils literal notranslate"><span class="pre">HuggingFacePipelineModelHandler.get_metrics_namespace()</span></code></a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.ml.inference.onnx_inference.html">apache_beam.ml.inference.onnx_inference module</a><ul>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.onnx_inference.html#apache_beam.ml.inference.onnx_inference.OnnxModelHandlerNumpy"><code class="docutils literal notranslate"><span class="pre">OnnxModelHandlerNumpy</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.onnx_inference.html#apache_beam.ml.inference.onnx_inference.OnnxModelHandlerNumpy.load_model"><code class="docutils literal notranslate"><span class="pre">OnnxModelHandlerNumpy.load_model()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.onnx_inference.html#apache_beam.ml.inference.onnx_inference.OnnxModelHandlerNumpy.run_inference"><code class="docutils literal notranslate"><span class="pre">OnnxModelHandlerNumpy.run_inference()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.onnx_inference.html#apache_beam.ml.inference.onnx_inference.OnnxModelHandlerNumpy.get_num_bytes"><code class="docutils literal notranslate"><span class="pre">OnnxModelHandlerNumpy.get_num_bytes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.onnx_inference.html#apache_beam.ml.inference.onnx_inference.OnnxModelHandlerNumpy.get_metrics_namespace"><code class="docutils literal notranslate"><span class="pre">OnnxModelHandlerNumpy.get_metrics_namespace()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.onnx_inference.html#apache_beam.ml.inference.onnx_inference.OnnxModelHandlerNumpy.share_model_across_processes"><code class="docutils literal notranslate"><span class="pre">OnnxModelHandlerNumpy.share_model_across_processes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.onnx_inference.html#apache_beam.ml.inference.onnx_inference.OnnxModelHandlerNumpy.model_copies"><code class="docutils literal notranslate"><span class="pre">OnnxModelHandlerNumpy.model_copies()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.onnx_inference.html#apache_beam.ml.inference.onnx_inference.OnnxModelHandlerNumpy.batch_elements_kwargs"><code class="docutils literal notranslate"><span class="pre">OnnxModelHandlerNumpy.batch_elements_kwargs()</span></code></a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.ml.inference.pytorch_inference.html">apache_beam.ml.inference.pytorch_inference module</a><ul>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.pytorch_inference.html#apache_beam.ml.inference.pytorch_inference.PytorchModelHandlerTensor"><code class="docutils literal notranslate"><span class="pre">PytorchModelHandlerTensor</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.pytorch_inference.html#apache_beam.ml.inference.pytorch_inference.PytorchModelHandlerTensor.load_model"><code class="docutils literal notranslate"><span class="pre">PytorchModelHandlerTensor.load_model()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.pytorch_inference.html#apache_beam.ml.inference.pytorch_inference.PytorchModelHandlerTensor.update_model_path"><code class="docutils literal notranslate"><span class="pre">PytorchModelHandlerTensor.update_model_path()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.pytorch_inference.html#apache_beam.ml.inference.pytorch_inference.PytorchModelHandlerTensor.run_inference"><code class="docutils literal notranslate"><span class="pre">PytorchModelHandlerTensor.run_inference()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.pytorch_inference.html#apache_beam.ml.inference.pytorch_inference.PytorchModelHandlerTensor.get_num_bytes"><code class="docutils literal notranslate"><span class="pre">PytorchModelHandlerTensor.get_num_bytes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.pytorch_inference.html#apache_beam.ml.inference.pytorch_inference.PytorchModelHandlerTensor.get_metrics_namespace"><code class="docutils literal notranslate"><span class="pre">PytorchModelHandlerTensor.get_metrics_namespace()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.pytorch_inference.html#apache_beam.ml.inference.pytorch_inference.PytorchModelHandlerTensor.validate_inference_args"><code class="docutils literal notranslate"><span class="pre">PytorchModelHandlerTensor.validate_inference_args()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.pytorch_inference.html#apache_beam.ml.inference.pytorch_inference.PytorchModelHandlerTensor.batch_elements_kwargs"><code class="docutils literal notranslate"><span class="pre">PytorchModelHandlerTensor.batch_elements_kwargs()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.pytorch_inference.html#apache_beam.ml.inference.pytorch_inference.PytorchModelHandlerTensor.share_model_across_processes"><code class="docutils literal notranslate"><span class="pre">PytorchModelHandlerTensor.share_model_across_processes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.pytorch_inference.html#apache_beam.ml.inference.pytorch_inference.PytorchModelHandlerTensor.model_copies"><code class="docutils literal notranslate"><span class="pre">PytorchModelHandlerTensor.model_copies()</span></code></a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.pytorch_inference.html#apache_beam.ml.inference.pytorch_inference.PytorchModelHandlerKeyedTensor"><code class="docutils literal notranslate"><span class="pre">PytorchModelHandlerKeyedTensor</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.pytorch_inference.html#apache_beam.ml.inference.pytorch_inference.PytorchModelHandlerKeyedTensor.load_model"><code class="docutils literal notranslate"><span class="pre">PytorchModelHandlerKeyedTensor.load_model()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.pytorch_inference.html#apache_beam.ml.inference.pytorch_inference.PytorchModelHandlerKeyedTensor.update_model_path"><code class="docutils literal notranslate"><span class="pre">PytorchModelHandlerKeyedTensor.update_model_path()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.pytorch_inference.html#apache_beam.ml.inference.pytorch_inference.PytorchModelHandlerKeyedTensor.run_inference"><code class="docutils literal notranslate"><span class="pre">PytorchModelHandlerKeyedTensor.run_inference()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.pytorch_inference.html#apache_beam.ml.inference.pytorch_inference.PytorchModelHandlerKeyedTensor.get_num_bytes"><code class="docutils literal notranslate"><span class="pre">PytorchModelHandlerKeyedTensor.get_num_bytes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.pytorch_inference.html#apache_beam.ml.inference.pytorch_inference.PytorchModelHandlerKeyedTensor.get_metrics_namespace"><code class="docutils literal notranslate"><span class="pre">PytorchModelHandlerKeyedTensor.get_metrics_namespace()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.pytorch_inference.html#apache_beam.ml.inference.pytorch_inference.PytorchModelHandlerKeyedTensor.validate_inference_args"><code class="docutils literal notranslate"><span class="pre">PytorchModelHandlerKeyedTensor.validate_inference_args()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.pytorch_inference.html#apache_beam.ml.inference.pytorch_inference.PytorchModelHandlerKeyedTensor.batch_elements_kwargs"><code class="docutils literal notranslate"><span class="pre">PytorchModelHandlerKeyedTensor.batch_elements_kwargs()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.pytorch_inference.html#apache_beam.ml.inference.pytorch_inference.PytorchModelHandlerKeyedTensor.share_model_across_processes"><code class="docutils literal notranslate"><span class="pre">PytorchModelHandlerKeyedTensor.share_model_across_processes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.pytorch_inference.html#apache_beam.ml.inference.pytorch_inference.PytorchModelHandlerKeyedTensor.model_copies"><code class="docutils literal notranslate"><span class="pre">PytorchModelHandlerKeyedTensor.model_copies()</span></code></a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.ml.inference.sklearn_inference.html">apache_beam.ml.inference.sklearn_inference module</a><ul>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.sklearn_inference.html#apache_beam.ml.inference.sklearn_inference.SklearnModelHandlerNumpy"><code class="docutils literal notranslate"><span class="pre">SklearnModelHandlerNumpy</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.sklearn_inference.html#apache_beam.ml.inference.sklearn_inference.SklearnModelHandlerNumpy.load_model"><code class="docutils literal notranslate"><span class="pre">SklearnModelHandlerNumpy.load_model()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.sklearn_inference.html#apache_beam.ml.inference.sklearn_inference.SklearnModelHandlerNumpy.update_model_path"><code class="docutils literal notranslate"><span class="pre">SklearnModelHandlerNumpy.update_model_path()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.sklearn_inference.html#apache_beam.ml.inference.sklearn_inference.SklearnModelHandlerNumpy.run_inference"><code class="docutils literal notranslate"><span class="pre">SklearnModelHandlerNumpy.run_inference()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.sklearn_inference.html#apache_beam.ml.inference.sklearn_inference.SklearnModelHandlerNumpy.get_num_bytes"><code class="docutils literal notranslate"><span class="pre">SklearnModelHandlerNumpy.get_num_bytes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.sklearn_inference.html#apache_beam.ml.inference.sklearn_inference.SklearnModelHandlerNumpy.get_metrics_namespace"><code class="docutils literal notranslate"><span class="pre">SklearnModelHandlerNumpy.get_metrics_namespace()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.sklearn_inference.html#apache_beam.ml.inference.sklearn_inference.SklearnModelHandlerNumpy.batch_elements_kwargs"><code class="docutils literal notranslate"><span class="pre">SklearnModelHandlerNumpy.batch_elements_kwargs()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.sklearn_inference.html#apache_beam.ml.inference.sklearn_inference.SklearnModelHandlerNumpy.share_model_across_processes"><code class="docutils literal notranslate"><span class="pre">SklearnModelHandlerNumpy.share_model_across_processes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.sklearn_inference.html#apache_beam.ml.inference.sklearn_inference.SklearnModelHandlerNumpy.model_copies"><code class="docutils literal notranslate"><span class="pre">SklearnModelHandlerNumpy.model_copies()</span></code></a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.sklearn_inference.html#apache_beam.ml.inference.sklearn_inference.SklearnModelHandlerPandas"><code class="docutils literal notranslate"><span class="pre">SklearnModelHandlerPandas</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.sklearn_inference.html#apache_beam.ml.inference.sklearn_inference.SklearnModelHandlerPandas.load_model"><code class="docutils literal notranslate"><span class="pre">SklearnModelHandlerPandas.load_model()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.sklearn_inference.html#apache_beam.ml.inference.sklearn_inference.SklearnModelHandlerPandas.update_model_path"><code class="docutils literal notranslate"><span class="pre">SklearnModelHandlerPandas.update_model_path()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.sklearn_inference.html#apache_beam.ml.inference.sklearn_inference.SklearnModelHandlerPandas.run_inference"><code class="docutils literal notranslate"><span class="pre">SklearnModelHandlerPandas.run_inference()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.sklearn_inference.html#apache_beam.ml.inference.sklearn_inference.SklearnModelHandlerPandas.get_num_bytes"><code class="docutils literal notranslate"><span class="pre">SklearnModelHandlerPandas.get_num_bytes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.sklearn_inference.html#apache_beam.ml.inference.sklearn_inference.SklearnModelHandlerPandas.get_metrics_namespace"><code class="docutils literal notranslate"><span class="pre">SklearnModelHandlerPandas.get_metrics_namespace()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.sklearn_inference.html#apache_beam.ml.inference.sklearn_inference.SklearnModelHandlerPandas.batch_elements_kwargs"><code class="docutils literal notranslate"><span class="pre">SklearnModelHandlerPandas.batch_elements_kwargs()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.sklearn_inference.html#apache_beam.ml.inference.sklearn_inference.SklearnModelHandlerPandas.share_model_across_processes"><code class="docutils literal notranslate"><span class="pre">SklearnModelHandlerPandas.share_model_across_processes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.sklearn_inference.html#apache_beam.ml.inference.sklearn_inference.SklearnModelHandlerPandas.model_copies"><code class="docutils literal notranslate"><span class="pre">SklearnModelHandlerPandas.model_copies()</span></code></a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.ml.inference.tensorflow_inference.html">apache_beam.ml.inference.tensorflow_inference module</a><ul>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.tensorflow_inference.html#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy"><code class="docutils literal notranslate"><span class="pre">TFModelHandlerNumpy</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorflow_inference.html#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.load_model"><code class="docutils literal notranslate"><span class="pre">TFModelHandlerNumpy.load_model()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorflow_inference.html#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.update_model_path"><code class="docutils literal notranslate"><span class="pre">TFModelHandlerNumpy.update_model_path()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorflow_inference.html#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.run_inference"><code class="docutils literal notranslate"><span class="pre">TFModelHandlerNumpy.run_inference()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorflow_inference.html#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.get_num_bytes"><code class="docutils literal notranslate"><span class="pre">TFModelHandlerNumpy.get_num_bytes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorflow_inference.html#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.get_metrics_namespace"><code class="docutils literal notranslate"><span class="pre">TFModelHandlerNumpy.get_metrics_namespace()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorflow_inference.html#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.validate_inference_args"><code class="docutils literal notranslate"><span class="pre">TFModelHandlerNumpy.validate_inference_args()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorflow_inference.html#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.batch_elements_kwargs"><code class="docutils literal notranslate"><span class="pre">TFModelHandlerNumpy.batch_elements_kwargs()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorflow_inference.html#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.share_model_across_processes"><code class="docutils literal notranslate"><span class="pre">TFModelHandlerNumpy.share_model_across_processes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorflow_inference.html#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.model_copies"><code class="docutils literal notranslate"><span class="pre">TFModelHandlerNumpy.model_copies()</span></code></a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.tensorflow_inference.html#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor"><code class="docutils literal notranslate"><span class="pre">TFModelHandlerTensor</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorflow_inference.html#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.load_model"><code class="docutils literal notranslate"><span class="pre">TFModelHandlerTensor.load_model()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorflow_inference.html#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.update_model_path"><code class="docutils literal notranslate"><span class="pre">TFModelHandlerTensor.update_model_path()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorflow_inference.html#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.run_inference"><code class="docutils literal notranslate"><span class="pre">TFModelHandlerTensor.run_inference()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorflow_inference.html#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.get_num_bytes"><code class="docutils literal notranslate"><span class="pre">TFModelHandlerTensor.get_num_bytes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorflow_inference.html#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.get_metrics_namespace"><code class="docutils literal notranslate"><span class="pre">TFModelHandlerTensor.get_metrics_namespace()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorflow_inference.html#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.validate_inference_args"><code class="docutils literal notranslate"><span class="pre">TFModelHandlerTensor.validate_inference_args()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorflow_inference.html#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.batch_elements_kwargs"><code class="docutils literal notranslate"><span class="pre">TFModelHandlerTensor.batch_elements_kwargs()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorflow_inference.html#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.share_model_across_processes"><code class="docutils literal notranslate"><span class="pre">TFModelHandlerTensor.share_model_across_processes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorflow_inference.html#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.model_copies"><code class="docutils literal notranslate"><span class="pre">TFModelHandlerTensor.model_copies()</span></code></a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.ml.inference.tensorrt_inference.html">apache_beam.ml.inference.tensorrt_inference module</a><ul>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.tensorrt_inference.html#apache_beam.ml.inference.tensorrt_inference.TensorRTEngine"><code class="docutils literal notranslate"><span class="pre">TensorRTEngine</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorrt_inference.html#apache_beam.ml.inference.tensorrt_inference.TensorRTEngine.get_engine_attrs"><code class="docutils literal notranslate"><span class="pre">TensorRTEngine.get_engine_attrs()</span></code></a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.tensorrt_inference.html#apache_beam.ml.inference.tensorrt_inference.TensorRTEngineHandlerNumPy"><code class="docutils literal notranslate"><span class="pre">TensorRTEngineHandlerNumPy</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorrt_inference.html#apache_beam.ml.inference.tensorrt_inference.TensorRTEngineHandlerNumPy.batch_elements_kwargs"><code class="docutils literal notranslate"><span class="pre">TensorRTEngineHandlerNumPy.batch_elements_kwargs()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorrt_inference.html#apache_beam.ml.inference.tensorrt_inference.TensorRTEngineHandlerNumPy.load_model"><code class="docutils literal notranslate"><span class="pre">TensorRTEngineHandlerNumPy.load_model()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorrt_inference.html#apache_beam.ml.inference.tensorrt_inference.TensorRTEngineHandlerNumPy.load_onnx"><code class="docutils literal notranslate"><span class="pre">TensorRTEngineHandlerNumPy.load_onnx()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorrt_inference.html#apache_beam.ml.inference.tensorrt_inference.TensorRTEngineHandlerNumPy.build_engine"><code class="docutils literal notranslate"><span class="pre">TensorRTEngineHandlerNumPy.build_engine()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorrt_inference.html#apache_beam.ml.inference.tensorrt_inference.TensorRTEngineHandlerNumPy.run_inference"><code class="docutils literal notranslate"><span class="pre">TensorRTEngineHandlerNumPy.run_inference()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorrt_inference.html#apache_beam.ml.inference.tensorrt_inference.TensorRTEngineHandlerNumPy.get_num_bytes"><code class="docutils literal notranslate"><span class="pre">TensorRTEngineHandlerNumPy.get_num_bytes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorrt_inference.html#apache_beam.ml.inference.tensorrt_inference.TensorRTEngineHandlerNumPy.get_metrics_namespace"><code class="docutils literal notranslate"><span class="pre">TensorRTEngineHandlerNumPy.get_metrics_namespace()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorrt_inference.html#apache_beam.ml.inference.tensorrt_inference.TensorRTEngineHandlerNumPy.share_model_across_processes"><code class="docutils literal notranslate"><span class="pre">TensorRTEngineHandlerNumPy.share_model_across_processes()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.tensorrt_inference.html#apache_beam.ml.inference.tensorrt_inference.TensorRTEngineHandlerNumPy.model_copies"><code class="docutils literal notranslate"><span class="pre">TensorRTEngineHandlerNumPy.model_copies()</span></code></a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.ml.inference.utils.html">apache_beam.ml.inference.utils module</a><ul>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.utils.html#apache_beam.ml.inference.utils.WatchFilePattern"><code class="docutils literal notranslate"><span class="pre">WatchFilePattern</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.utils.html#apache_beam.ml.inference.utils.WatchFilePattern.expand"><code class="docutils literal notranslate"><span class="pre">WatchFilePattern.expand()</span></code></a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.ml.inference.vertex_ai_inference.html">apache_beam.ml.inference.vertex_ai_inference module</a><ul>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.vertex_ai_inference.html#apache_beam.ml.inference.vertex_ai_inference.VertexAIModelHandlerJSON"><code class="docutils literal notranslate"><span class="pre">VertexAIModelHandlerJSON</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.vertex_ai_inference.html#apache_beam.ml.inference.vertex_ai_inference.VertexAIModelHandlerJSON.create_client"><code class="docutils literal notranslate"><span class="pre">VertexAIModelHandlerJSON.create_client()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.vertex_ai_inference.html#apache_beam.ml.inference.vertex_ai_inference.VertexAIModelHandlerJSON.request"><code class="docutils literal notranslate"><span class="pre">VertexAIModelHandlerJSON.request()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.vertex_ai_inference.html#apache_beam.ml.inference.vertex_ai_inference.VertexAIModelHandlerJSON.validate_inference_args"><code class="docutils literal notranslate"><span class="pre">VertexAIModelHandlerJSON.validate_inference_args()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.vertex_ai_inference.html#apache_beam.ml.inference.vertex_ai_inference.VertexAIModelHandlerJSON.batch_elements_kwargs"><code class="docutils literal notranslate"><span class="pre">VertexAIModelHandlerJSON.batch_elements_kwargs()</span></code></a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.ml.inference.vllm_inference.html">apache_beam.ml.inference.vllm_inference module</a><ul>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.vllm_inference.html#apache_beam.ml.inference.vllm_inference.OpenAIChatMessage"><code class="docutils literal notranslate"><span class="pre">OpenAIChatMessage</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.vllm_inference.html#apache_beam.ml.inference.vllm_inference.OpenAIChatMessage.role"><code class="docutils literal notranslate"><span class="pre">OpenAIChatMessage.role</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.vllm_inference.html#apache_beam.ml.inference.vllm_inference.OpenAIChatMessage.content"><code class="docutils literal notranslate"><span class="pre">OpenAIChatMessage.content</span></code></a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.vllm_inference.html#apache_beam.ml.inference.vllm_inference.VLLMCompletionsModelHandler"><code class="docutils literal notranslate"><span class="pre">VLLMCompletionsModelHandler</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.vllm_inference.html#apache_beam.ml.inference.vllm_inference.VLLMCompletionsModelHandler.load_model"><code class="docutils literal notranslate"><span class="pre">VLLMCompletionsModelHandler.load_model()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.vllm_inference.html#apache_beam.ml.inference.vllm_inference.VLLMCompletionsModelHandler.run_inference"><code class="docutils literal notranslate"><span class="pre">VLLMCompletionsModelHandler.run_inference()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.vllm_inference.html#apache_beam.ml.inference.vllm_inference.VLLMCompletionsModelHandler.share_model_across_processes"><code class="docutils literal notranslate"><span class="pre">VLLMCompletionsModelHandler.share_model_across_processes()</span></code></a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.vllm_inference.html#apache_beam.ml.inference.vllm_inference.VLLMChatModelHandler"><code class="docutils literal notranslate"><span class="pre">VLLMChatModelHandler</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.vllm_inference.html#apache_beam.ml.inference.vllm_inference.VLLMChatModelHandler.load_model"><code class="docutils literal notranslate"><span class="pre">VLLMChatModelHandler.load_model()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.vllm_inference.html#apache_beam.ml.inference.vllm_inference.VLLMChatModelHandler.run_inference"><code class="docutils literal notranslate"><span class="pre">VLLMChatModelHandler.run_inference()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.vllm_inference.html#apache_beam.ml.inference.vllm_inference.VLLMChatModelHandler.share_model_across_processes"><code class="docutils literal notranslate"><span class="pre">VLLMChatModelHandler.share_model_across_processes()</span></code></a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="apache_beam.ml.inference.xgboost_inference.html">apache_beam.ml.inference.xgboost_inference module</a><ul>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.xgboost_inference.html#apache_beam.ml.inference.xgboost_inference.XGBoostModelHandler"><code class="docutils literal notranslate"><span class="pre">XGBoostModelHandler</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.xgboost_inference.html#apache_beam.ml.inference.xgboost_inference.XGBoostModelHandler.load_model"><code class="docutils literal notranslate"><span class="pre">XGBoostModelHandler.load_model()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.xgboost_inference.html#apache_beam.ml.inference.xgboost_inference.XGBoostModelHandler.get_metrics_namespace"><code class="docutils literal notranslate"><span class="pre">XGBoostModelHandler.get_metrics_namespace()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.xgboost_inference.html#apache_beam.ml.inference.xgboost_inference.XGBoostModelHandler.batch_elements_kwargs"><code class="docutils literal notranslate"><span class="pre">XGBoostModelHandler.batch_elements_kwargs()</span></code></a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.xgboost_inference.html#apache_beam.ml.inference.xgboost_inference.XGBoostModelHandlerNumpy"><code class="docutils literal notranslate"><span class="pre">XGBoostModelHandlerNumpy</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.xgboost_inference.html#apache_beam.ml.inference.xgboost_inference.XGBoostModelHandlerNumpy.run_inference"><code class="docutils literal notranslate"><span class="pre">XGBoostModelHandlerNumpy.run_inference()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.xgboost_inference.html#apache_beam.ml.inference.xgboost_inference.XGBoostModelHandlerNumpy.get_num_bytes"><code class="docutils literal notranslate"><span class="pre">XGBoostModelHandlerNumpy.get_num_bytes()</span></code></a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.xgboost_inference.html#apache_beam.ml.inference.xgboost_inference.XGBoostModelHandlerPandas"><code class="docutils literal notranslate"><span class="pre">XGBoostModelHandlerPandas</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.xgboost_inference.html#apache_beam.ml.inference.xgboost_inference.XGBoostModelHandlerPandas.run_inference"><code class="docutils literal notranslate"><span class="pre">XGBoostModelHandlerPandas.run_inference()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.xgboost_inference.html#apache_beam.ml.inference.xgboost_inference.XGBoostModelHandlerPandas.get_num_bytes"><code class="docutils literal notranslate"><span class="pre">XGBoostModelHandlerPandas.get_num_bytes()</span></code></a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.xgboost_inference.html#apache_beam.ml.inference.xgboost_inference.XGBoostModelHandlerSciPy"><code class="docutils literal notranslate"><span class="pre">XGBoostModelHandlerSciPy</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.xgboost_inference.html#apache_beam.ml.inference.xgboost_inference.XGBoostModelHandlerSciPy.run_inference"><code class="docutils literal notranslate"><span class="pre">XGBoostModelHandlerSciPy.run_inference()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.xgboost_inference.html#apache_beam.ml.inference.xgboost_inference.XGBoostModelHandlerSciPy.get_num_bytes"><code class="docutils literal notranslate"><span class="pre">XGBoostModelHandlerSciPy.get_num_bytes()</span></code></a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="apache_beam.ml.inference.xgboost_inference.html#apache_beam.ml.inference.xgboost_inference.XGBoostModelHandlerDatatable"><code class="docutils literal notranslate"><span class="pre">XGBoostModelHandlerDatatable</span></code></a><ul>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.xgboost_inference.html#apache_beam.ml.inference.xgboost_inference.XGBoostModelHandlerDatatable.run_inference"><code class="docutils literal notranslate"><span class="pre">XGBoostModelHandlerDatatable.run_inference()</span></code></a></li>
<li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.inference.xgboost_inference.html#apache_beam.ml.inference.xgboost_inference.XGBoostModelHandlerDatatable.get_num_bytes"><code class="docutils literal notranslate"><span class="pre">XGBoostModelHandlerDatatable.get_num_bytes()</span></code></a></li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
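<p>As a minimal usage sketch (not drawn from this page), the model handlers listed above are normally passed to the <code class="docutils literal notranslate"><span class="pre">RunInference</span></code> transform from <code class="docutils literal notranslate"><span class="pre">apache_beam.ml.inference.base</span></code>. The SavedModel URI and the example input below are hypothetical placeholders, and the same pattern applies to the other handlers in this package:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>
import numpy as np

import apache_beam as beam
from apache_beam.ml.inference.base import RunInference
from apache_beam.ml.inference.tensorflow_inference import TFModelHandlerNumpy

# Hypothetical SavedModel location; substitute a real local path or GCS URI.
model_handler = TFModelHandlerNumpy(model_uri='gs://your-bucket/saved_model')

with beam.Pipeline() as pipeline:
    _ = (
        pipeline
        # Hypothetical example batch of numpy inputs.
        | 'CreateExamples' &gt;&gt; beam.Create([np.array([1.0, 2.0, 3.0])])
        # Each output element is a PredictionResult pairing input and inference.
        | 'RunInference' &gt;&gt; RunInference(model_handler)
        | 'PrintPredictions' &gt;&gt; beam.Map(print))
</pre></div></div>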
</section>
</section>
</div>
</div>
<footer><div class="rst-footer-buttons" role="navigation" aria-label="Footer">
<a href="apache_beam.ml.gcp.visionml.html" class="btn btn-neutral float-left" title="apache_beam.ml.gcp.visionml module" accesskey="p" rel="prev"><span class="fa fa-arrow-circle-left" aria-hidden="true"></span> Previous</a>
<a href="apache_beam.ml.inference.base.html" class="btn btn-neutral float-right" title="apache_beam.ml.inference.base module" accesskey="n" rel="next">Next <span class="fa fa-arrow-circle-right" aria-hidden="true"></span></a>
</div>
<hr/>
<div role="contentinfo">
<p>&#169; Copyright, Apache Beam.</p>
</div>
Built with <a href="https://www.sphinx-doc.org/">Sphinx</a> using a
<a href="https://github.com/readthedocs/sphinx_rtd_theme">theme</a>
provided by <a href="https://readthedocs.org">Read the Docs</a>.
</footer>
</div>
</div>
</section>
</div>
<script>
jQuery(function () {
SphinxRtdTheme.Navigation.enable(true);
});
</script>
</body>
</html>