| |
| |
| <!DOCTYPE html> |
| <html class="writer-html5" lang="en" data-content_root="./"> |
| <head> |
| <meta charset="utf-8" /><meta name="viewport" content="width=device-width, initial-scale=1" /> |
| |
| <title>apache_beam.ml.inference.tensorflow_inference module — Apache Beam 2.68.0 documentation</title> |
| <link rel="stylesheet" type="text/css" href="_static/pygments.css?v=b86133f3" /> |
| <link rel="stylesheet" type="text/css" href="_static/css/theme.css?v=e59714d7" /> |
| |
| |
| <script src="_static/jquery.js?v=5d32c60e"></script> |
| <script src="_static/_sphinx_javascript_frameworks_compat.js?v=2cd50e6c"></script> |
| <script src="_static/documentation_options.js?v=2388e03a"></script> |
| <script src="_static/doctools.js?v=9a2dae69"></script> |
| <script src="_static/sphinx_highlight.js?v=dc90522c"></script> |
| <script src="_static/js/theme.js"></script> |
| <link rel="index" title="Index" href="genindex.html" /> |
| <link rel="search" title="Search" href="search.html" /> |
| <link rel="next" title="apache_beam.ml.inference.tensorrt_inference module" href="apache_beam.ml.inference.tensorrt_inference.html" /> |
| <link rel="prev" title="apache_beam.ml.inference.sklearn_inference module" href="apache_beam.ml.inference.sklearn_inference.html" /> |
| </head> |
| |
| <body class="wy-body-for-nav"> |
| <div class="wy-grid-for-nav"> |
| <nav data-toggle="wy-nav-shift" class="wy-nav-side"> |
| <div class="wy-side-scroll"> |
| <div class="wy-side-nav-search" > |
| |
| |
| |
| <a href="index.html" class="icon icon-home"> |
| Apache Beam |
| </a> |
| <div role="search"> |
| <form id="rtd-search-form" class="wy-form" action="search.html" method="get"> |
| <input type="text" name="q" placeholder="Search docs" aria-label="Search docs" /> |
| <input type="hidden" name="check_keywords" value="yes" /> |
| <input type="hidden" name="area" value="default" /> |
| </form> |
| </div> |
| </div><div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="Navigation menu"> |
| <ul class="current"> |
| <li class="toctree-l1"><a class="reference internal" href="apache_beam.coders.html">apache_beam.coders package</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="apache_beam.dataframe.html">apache_beam.dataframe package</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="apache_beam.io.html">apache_beam.io package</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="apache_beam.metrics.html">apache_beam.metrics package</a></li> |
| <li class="toctree-l1 current"><a class="reference internal" href="apache_beam.ml.html">apache_beam.ml package</a><ul class="current"> |
| <li class="toctree-l2 current"><a class="reference internal" href="apache_beam.ml.html#subpackages">Subpackages</a><ul class="current"> |
| <li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.anomaly.html">apache_beam.ml.anomaly package</a></li> |
| <li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.gcp.html">apache_beam.ml.gcp package</a></li> |
| <li class="toctree-l3 current"><a class="reference internal" href="apache_beam.ml.inference.html">apache_beam.ml.inference package</a><ul class="current"> |
| <li class="toctree-l4 current"><a class="reference internal" href="apache_beam.ml.inference.html#submodules">Submodules</a></li> |
| </ul> |
| </li> |
| <li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.rag.html">apache_beam.ml.rag package</a></li> |
| <li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.transforms.html">apache_beam.ml.transforms package</a></li> |
| <li class="toctree-l3"><a class="reference internal" href="apache_beam.ml.ts.html">apache_beam.ml.ts package</a></li> |
| </ul> |
| </li> |
| </ul> |
| </li> |
| <li class="toctree-l1"><a class="reference internal" href="apache_beam.options.html">apache_beam.options package</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="apache_beam.portability.html">apache_beam.portability package</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="apache_beam.runners.html">apache_beam.runners package</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="apache_beam.testing.html">apache_beam.testing package</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="apache_beam.transforms.html">apache_beam.transforms package</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="apache_beam.typehints.html">apache_beam.typehints package</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="apache_beam.utils.html">apache_beam.utils package</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="apache_beam.yaml.html">apache_beam.yaml package</a></li> |
| </ul> |
| <ul> |
| <li class="toctree-l1"><a class="reference internal" href="apache_beam.error.html">apache_beam.error module</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="apache_beam.pipeline.html">apache_beam.pipeline module</a></li> |
| <li class="toctree-l1"><a class="reference internal" href="apache_beam.pvalue.html">apache_beam.pvalue module</a></li> |
| </ul> |
| |
| </div> |
| </div> |
| </nav> |
| |
| <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap"><nav class="wy-nav-top" aria-label="Mobile navigation menu" > |
| <i data-toggle="wy-nav-top" class="fa fa-bars"></i> |
| <a href="index.html">Apache Beam</a> |
| </nav> |
| |
| <div class="wy-nav-content"> |
| <div class="rst-content"> |
| <div role="navigation" aria-label="Page navigation"> |
| <ul class="wy-breadcrumbs"> |
| <li><a href="index.html" class="icon icon-home" aria-label="Home"></a></li> |
| <li class="breadcrumb-item"><a href="apache_beam.ml.html">apache_beam.ml package</a></li> |
| <li class="breadcrumb-item"><a href="apache_beam.ml.inference.html">apache_beam.ml.inference package</a></li> |
| <li class="breadcrumb-item active">apache_beam.ml.inference.tensorflow_inference module</li> |
| <li class="wy-breadcrumbs-aside"> |
| <a href="_sources/apache_beam.ml.inference.tensorflow_inference.rst.txt" rel="nofollow"> View page source</a> |
| </li> |
| </ul> |
| <hr/> |
| </div> |
| <div role="main" class="document" itemscope="itemscope" itemtype="http://schema.org/Article"> |
| <div itemprop="articleBody"> |
| |
| <section id="module-apache_beam.ml.inference.tensorflow_inference"> |
| <span id="apache-beam-ml-inference-tensorflow-inference-module"></span><h1>apache_beam.ml.inference.tensorflow_inference module<a class="headerlink" href="#module-apache_beam.ml.inference.tensorflow_inference" title="Link to this heading"></a></h1> |
| <dl class="py class"> |
| <dt class="sig sig-object py" id="apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy"> |
| <em class="property"><span class="pre">class</span><span class="w"> </span></em><span class="sig-prename descclassname"><span class="pre">apache_beam.ml.inference.tensorflow_inference.</span></span><span class="sig-name descname"><span class="pre">TFModelHandlerNumpy</span></span><span class="sig-paren">(</span><em class="sig-param"><span class="pre">model_uri:</span> <span class="pre">str,</span> <span class="pre">model_type:</span> <span class="pre">~apache_beam.ml.inference.tensorflow_inference.ModelType</span> <span class="pre">=</span> <span class="pre">ModelType.SAVED_MODEL,</span> <span class="pre">create_model_fn:</span> <span class="pre">~collections.abc.Callable</span> <span class="pre">|</span> <span class="pre">None</span> <span class="pre">=</span> <span class="pre">None,</span> <span class="pre">*,</span> <span class="pre">load_model_args:</span> <span class="pre">dict[str,</span> <span class="pre">~typing.Any]</span> <span class="pre">|</span> <span class="pre">None</span> <span class="pre">=</span> <span class="pre">None,</span> <span class="pre">custom_weights:</span> <span class="pre">str</span> <span class="pre">=</span> <span class="pre">'',</span> <span class="pre">inference_fn:</span> <span class="pre">~collections.abc.Callable[[tensorflow.Module,</span> <span class="pre">~collections.abc.Sequence[~numpy.ndarray</span> <span class="pre">|</span> <span class="pre">tensorflow.Tensor],</span> <span class="pre">dict[str,</span> <span class="pre">~typing.Any],</span> <span class="pre">str</span> <span class="pre">|</span> <span class="pre">None],</span> <span class="pre">~collections.abc.Iterable[~apache_beam.ml.inference.base.PredictionResult]]</span> <span class="pre">=</span> <span class="pre"><function</span> <span class="pre">default_numpy_inference_fn>,</span> <span class="pre">min_batch_size:</span> <span class="pre">int</span> <span class="pre">|</span> <span class="pre">None</span> <span class="pre">=</span> <span class="pre">None,</span> <span class="pre">max_batch_size:</span> <span class="pre">int</span> <span class="pre">|</span> <span class="pre">None</span> <span class="pre">=</span> <span class="pre">None,</span> <span class="pre">max_batch_duration_secs:</span> <span class="pre">int</span> <span class="pre">|</span> <span class="pre">None</span> <span class="pre">=</span> <span class="pre">None,</span> <span class="pre">large_model:</span> <span class="pre">bool</span> <span class="pre">=</span> <span class="pre">False,</span> <span class="pre">model_copies:</span> <span class="pre">int</span> <span class="pre">|</span> <span class="pre">None</span> <span class="pre">=</span> <span class="pre">None,</span> <span class="pre">**kwargs</span></em><span class="sig-paren">)</span><a class="reference internal" href="_modules/apache_beam/ml/inference/tensorflow_inference.html#TFModelHandlerNumpy"><span class="viewcode-link"><span class="pre">[source]</span></span></a><a class="headerlink" href="#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy" title="Link to this definition"></a></dt> |
| <dd><p>Bases: <a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelHandler" title="apache_beam.ml.inference.base.ModelHandler"><code class="xref py py-class docutils literal notranslate"><span class="pre">ModelHandler</span></code></a>[<a class="reference external" href="https://numpy.org/doc/stable/reference/generated/numpy.ndarray.html#numpy.ndarray" title="(in NumPy v2.3)"><code class="xref py py-class docutils literal notranslate"><span class="pre">ndarray</span></code></a>, <a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.PredictionResult" title="apache_beam.ml.inference.base.PredictionResult"><code class="xref py py-class docutils literal notranslate"><span class="pre">PredictionResult</span></code></a>, <code class="xref py py-class docutils literal notranslate"><span class="pre">Module</span></code>]</p> |
<p>Implementation of the ModelHandler interface for TensorFlow, operating on batches of numpy arrays.</p>
| <p>Example Usage:</p> |
| <div class="highlight-default notranslate"><div class="highlight"><pre><span></span><span class="n">pcoll</span> <span class="o">|</span> <span class="n">RunInference</span><span class="p">(</span><span class="n">TFModelHandlerNumpy</span><span class="p">(</span><span class="n">model_uri</span><span class="o">=</span><span class="s2">"my_uri"</span><span class="p">))</span> |
| </pre></div> |
| </div> |
| <p>See <a class="reference external" href="https://www.tensorflow.org/tutorials/keras/save_and_load">https://www.tensorflow.org/tutorials/keras/save_and_load</a> for details.</p> |
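<p>A minimal end-to-end pipeline sketch; the model path and the input arrays
below are illustrative placeholders, not part of the API:</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>import numpy
import apache_beam as beam
from apache_beam.ml.inference.base import RunInference
from apache_beam.ml.inference.tensorflow_inference import TFModelHandlerNumpy

# Hypothetical SavedModel path; replace with your own trained model.
model_handler = TFModelHandlerNumpy(model_uri="gs://my-bucket/saved_model")

with beam.Pipeline() as p:
    (p
     | beam.Create([numpy.array([1.0, 2.0]), numpy.array([3.0, 4.0])])
     | RunInference(model_handler)
     | beam.Map(print))
</pre></div>
</div>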
| <dl class="field-list simple"> |
| <dt class="field-odd">Parameters<span class="colon">:</span></dt> |
| <dd class="field-odd"><ul class="simple"> |
| <li><p><strong>model_uri</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.13)"><em>str</em></a>) – path to the trained model.</p></li> |
| <li><p><strong>model_type</strong> – type of model to be loaded. Defaults to SAVED_MODEL.</p></li> |
<li><p><strong>create_model_fn</strong> – a function that creates and returns a new
TensorFlow model into which the saved weights are loaded.
It should be used with ModelType.SAVED_WEIGHTS (see the sketch after this
parameter list).</p></li>
| <li><p><strong>load_model_args</strong> – a dictionary of parameters to pass to the load_model |
| function of TensorFlow to specify custom config.</p></li> |
| <li><p><strong>custom_weights</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.13)"><em>str</em></a>) – path to the custom weights to be applied |
| once the model is loaded.</p></li> |
<li><p><strong>inference_fn</strong> – inference function to use during RunInference.
Defaults to default_numpy_inference_fn.</p></li>
<li><p><strong>min_batch_size</strong> – the minimum batch size to use when batching inputs.</p></li>
<li><p><strong>max_batch_size</strong> – the maximum batch size to use when batching inputs.</p></li>
<li><p><strong>max_batch_duration_secs</strong> – the maximum amount of time to buffer a batch
before emitting; used in streaming contexts.</p></li>
| <li><p><strong>large_model</strong> – set to true if your model is large enough to run into |
| memory pressure if you load multiple copies. Given a model that |
| consumes N memory and a machine with W cores and M memory, you should |
| set this to True if N*W > M.</p></li> |
| <li><p><strong>model_copies</strong> – The exact number of models that you would like loaded |
| onto your machine. This can be useful if you exactly know your CPU or |
| GPU capacity and want to maximize resource utilization.</p></li> |
| <li><p><strong>kwargs</strong> – ‘env_vars’ can be used to set environment variables |
| before loading the model.</p></li> |
| </ul> |
| </dd> |
| </dl> |
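<p>A sketch of loading saved weights instead of a full SavedModel, assuming a
hypothetical <cite>build_model</cite> function and weights path; the architecture must
match the weights that were saved:</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>import tensorflow as tf
from apache_beam.ml.inference.tensorflow_inference import ModelType
from apache_beam.ml.inference.tensorflow_inference import TFModelHandlerNumpy

def build_model():
    # Hypothetical architecture; it must mirror the model whose weights were saved.
    return tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(2,))])

model_handler = TFModelHandlerNumpy(
    model_uri="gs://my-bucket/model_weights",  # hypothetical weights path
    model_type=ModelType.SAVED_WEIGHTS,
    create_model_fn=build_model)
</pre></div>
</div>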
<p><strong>Supported Versions:</strong> RunInference APIs in Apache Beam have been tested
with TensorFlow 2.9, 2.10, and 2.11.</p>
| <dl class="py method"> |
| <dt class="sig sig-object py" id="apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.load_model"> |
| <span class="sig-name descname"><span class="pre">load_model</span></span><span class="sig-paren">(</span><span class="sig-paren">)</span> <span class="sig-return"><span class="sig-return-icon">→</span> <span class="sig-return-typehint"><span class="pre">tensorflow.Module</span></span></span><a class="reference internal" href="_modules/apache_beam/ml/inference/tensorflow_inference.html#TFModelHandlerNumpy.load_model"><span class="viewcode-link"><span class="pre">[source]</span></span></a><a class="headerlink" href="#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.load_model" title="Link to this definition"></a></dt> |
<dd><p>Loads and initializes a TensorFlow model for processing.</p>
| </dd></dl> |
| |
| <dl class="py method"> |
| <dt class="sig sig-object py" id="apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.update_model_path"> |
| <span class="sig-name descname"><span class="pre">update_model_path</span></span><span class="sig-paren">(</span><em class="sig-param"><span class="n"><span class="pre">model_path</span></span><span class="p"><span class="pre">:</span></span><span class="w"> </span><span class="n"><a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.13)"><span class="pre">str</span></a><span class="w"> </span><span class="p"><span class="pre">|</span></span><span class="w"> </span><a class="reference external" href="https://docs.python.org/3/library/constants.html#None" title="(in Python v3.13)"><span class="pre">None</span></a></span><span class="w"> </span><span class="o"><span class="pre">=</span></span><span class="w"> </span><span class="default_value"><span class="pre">None</span></span></em><span class="sig-paren">)</span><a class="reference internal" href="_modules/apache_beam/ml/inference/tensorflow_inference.html#TFModelHandlerNumpy.update_model_path"><span class="viewcode-link"><span class="pre">[source]</span></span></a><a class="headerlink" href="#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.update_model_path" title="Link to this definition"></a></dt> |
| <dd></dd></dl> |
| |
| <dl class="py method"> |
| <dt class="sig sig-object py" id="apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.run_inference"> |
| <span class="sig-name descname"><span class="pre">run_inference</span></span><span class="sig-paren">(</span><em class="sig-param"><span class="n"><span class="pre">batch</span></span><span class="p"><span class="pre">:</span></span><span class="w"> </span><span class="n"><a class="reference external" href="https://docs.python.org/3/library/collections.abc.html#collections.abc.Sequence" title="(in Python v3.13)"><span class="pre">Sequence</span></a><span class="p"><span class="pre">[</span></span><a class="reference external" href="https://numpy.org/doc/stable/reference/generated/numpy.ndarray.html#numpy.ndarray" title="(in NumPy v2.3)"><span class="pre">ndarray</span></a><span class="p"><span class="pre">]</span></span></span></em>, <em class="sig-param"><span class="n"><span class="pre">model</span></span><span class="p"><span class="pre">:</span></span><span class="w"> </span><span class="n"><span class="pre">tensorflow.Module</span></span></em>, <em class="sig-param"><span class="n"><span class="pre">inference_args</span></span><span class="p"><span class="pre">:</span></span><span class="w"> </span><span class="n"><a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.13)"><span class="pre">dict</span></a><span class="p"><span class="pre">[</span></span><a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.13)"><span class="pre">str</span></a><span class="p"><span class="pre">,</span></span><span class="w"> </span><a class="reference external" href="https://docs.python.org/3/library/typing.html#typing.Any" title="(in Python v3.13)"><span class="pre">Any</span></a><span class="p"><span class="pre">]</span></span><span class="w"> </span><span class="p"><span class="pre">|</span></span><span class="w"> </span><a class="reference external" href="https://docs.python.org/3/library/constants.html#None" title="(in Python v3.13)"><span class="pre">None</span></a></span><span class="w"> </span><span class="o"><span class="pre">=</span></span><span class="w"> </span><span class="default_value"><span class="pre">None</span></span></em><span class="sig-paren">)</span> <span class="sig-return"><span class="sig-return-icon">→</span> <span class="sig-return-typehint"><a class="reference external" href="https://docs.python.org/3/library/collections.abc.html#collections.abc.Iterable" title="(in Python v3.13)"><span class="pre">Iterable</span></a><span class="p"><span class="pre">[</span></span><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.PredictionResult" title="apache_beam.ml.inference.base.PredictionResult"><span class="pre">PredictionResult</span></a><span class="p"><span class="pre">]</span></span></span></span><a class="reference internal" href="_modules/apache_beam/ml/inference/tensorflow_inference.html#TFModelHandlerNumpy.run_inference"><span class="viewcode-link"><span class="pre">[source]</span></span></a><a class="headerlink" href="#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.run_inference" title="Link to this definition"></a></dt> |
<dd><p>Runs inferences on a batch of numpy arrays and returns an Iterable of
numpy array predictions.</p>
<p>This method stacks the n-dimensional numpy arrays in a vectorized format to
optimize the inference call.</p>
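<p>For illustration, the batch is combined roughly as follows before the model is
invoked (a simplified sketch, not the handler’s exact code path):</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>import numpy

# Two batchable elements with identical shapes...
batch = [numpy.array([1.0, 2.0]), numpy.array([3.0, 4.0])]
# ...are stacked into a single (batch_size, n_features) array.
vectorized = numpy.stack(batch)  # shape (2, 2)
</pre></div>
</div>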
| <dl class="field-list simple"> |
| <dt class="field-odd">Parameters<span class="colon">:</span></dt> |
| <dd class="field-odd"><ul class="simple"> |
<li><p><strong>batch</strong> – A sequence of numpy nd-arrays. These should be batchable, as this
method will call <cite>numpy.stack()</cite> and pass in batched numpy nd-arrays
with dimensions (batch_size, n_features, etc.) into the model’s
predict() function.</p></li>
<li><p><strong>model</strong> – A TensorFlow model.</p></li>
| <li><p><strong>inference_args</strong> – any additional arguments for an inference.</p></li> |
| </ul> |
| </dd> |
| <dt class="field-even">Returns<span class="colon">:</span></dt> |
| <dd class="field-even"><p>An Iterable of type PredictionResult.</p> |
| </dd> |
| </dl> |
| </dd></dl> |
| |
| <dl class="py method"> |
| <dt class="sig sig-object py" id="apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.get_num_bytes"> |
| <span class="sig-name descname"><span class="pre">get_num_bytes</span></span><span class="sig-paren">(</span><em class="sig-param"><span class="n"><span class="pre">batch</span></span><span class="p"><span class="pre">:</span></span><span class="w"> </span><span class="n"><a class="reference external" href="https://docs.python.org/3/library/collections.abc.html#collections.abc.Sequence" title="(in Python v3.13)"><span class="pre">Sequence</span></a><span class="p"><span class="pre">[</span></span><a class="reference external" href="https://numpy.org/doc/stable/reference/generated/numpy.ndarray.html#numpy.ndarray" title="(in NumPy v2.3)"><span class="pre">ndarray</span></a><span class="p"><span class="pre">]</span></span></span></em><span class="sig-paren">)</span> <span class="sig-return"><span class="sig-return-icon">→</span> <span class="sig-return-typehint"><a class="reference external" href="https://docs.python.org/3/library/functions.html#int" title="(in Python v3.13)"><span class="pre">int</span></a></span></span><a class="reference internal" href="_modules/apache_beam/ml/inference/tensorflow_inference.html#TFModelHandlerNumpy.get_num_bytes"><span class="viewcode-link"><span class="pre">[source]</span></span></a><a class="headerlink" href="#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.get_num_bytes" title="Link to this definition"></a></dt> |
| <dd><dl class="field-list simple"> |
| <dt class="field-odd">Returns<span class="colon">:</span></dt> |
| <dd class="field-odd"><p>The number of bytes of data for a batch of numpy arrays.</p> |
| </dd> |
| </dl> |
| </dd></dl> |
| |
| <dl class="py method"> |
| <dt class="sig sig-object py" id="apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.get_metrics_namespace"> |
| <span class="sig-name descname"><span class="pre">get_metrics_namespace</span></span><span class="sig-paren">(</span><span class="sig-paren">)</span> <span class="sig-return"><span class="sig-return-icon">→</span> <span class="sig-return-typehint"><a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.13)"><span class="pre">str</span></a></span></span><a class="reference internal" href="_modules/apache_beam/ml/inference/tensorflow_inference.html#TFModelHandlerNumpy.get_metrics_namespace"><span class="viewcode-link"><span class="pre">[source]</span></span></a><a class="headerlink" href="#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.get_metrics_namespace" title="Link to this definition"></a></dt> |
| <dd><dl class="field-list simple"> |
| <dt class="field-odd">Returns<span class="colon">:</span></dt> |
| <dd class="field-odd"><p>A namespace for metrics collected by the RunInference transform.</p> |
| </dd> |
| </dl> |
| </dd></dl> |
| |
| <dl class="py method"> |
| <dt class="sig sig-object py" id="apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.validate_inference_args"> |
| <span class="sig-name descname"><span class="pre">validate_inference_args</span></span><span class="sig-paren">(</span><em class="sig-param"><span class="n"><span class="pre">inference_args</span></span><span class="p"><span class="pre">:</span></span><span class="w"> </span><span class="n"><a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.13)"><span class="pre">dict</span></a><span class="p"><span class="pre">[</span></span><a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.13)"><span class="pre">str</span></a><span class="p"><span class="pre">,</span></span><span class="w"> </span><a class="reference external" href="https://docs.python.org/3/library/typing.html#typing.Any" title="(in Python v3.13)"><span class="pre">Any</span></a><span class="p"><span class="pre">]</span></span><span class="w"> </span><span class="p"><span class="pre">|</span></span><span class="w"> </span><a class="reference external" href="https://docs.python.org/3/library/constants.html#None" title="(in Python v3.13)"><span class="pre">None</span></a></span></em><span class="sig-paren">)</span><a class="reference internal" href="_modules/apache_beam/ml/inference/tensorflow_inference.html#TFModelHandlerNumpy.validate_inference_args"><span class="viewcode-link"><span class="pre">[source]</span></span></a><a class="headerlink" href="#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.validate_inference_args" title="Link to this definition"></a></dt> |
| <dd></dd></dl> |
| |
| <dl class="py method"> |
| <dt class="sig sig-object py" id="apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.batch_elements_kwargs"> |
| <span class="sig-name descname"><span class="pre">batch_elements_kwargs</span></span><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="reference internal" href="_modules/apache_beam/ml/inference/tensorflow_inference.html#TFModelHandlerNumpy.batch_elements_kwargs"><span class="viewcode-link"><span class="pre">[source]</span></span></a><a class="headerlink" href="#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.batch_elements_kwargs" title="Link to this definition"></a></dt> |
| <dd></dd></dl> |
| |
| <dl class="py method"> |
| <dt class="sig sig-object py" id="apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.share_model_across_processes"> |
| <span class="sig-name descname"><span class="pre">share_model_across_processes</span></span><span class="sig-paren">(</span><span class="sig-paren">)</span> <span class="sig-return"><span class="sig-return-icon">→</span> <span class="sig-return-typehint"><a class="reference external" href="https://docs.python.org/3/library/functions.html#bool" title="(in Python v3.13)"><span class="pre">bool</span></a></span></span><a class="reference internal" href="_modules/apache_beam/ml/inference/tensorflow_inference.html#TFModelHandlerNumpy.share_model_across_processes"><span class="viewcode-link"><span class="pre">[source]</span></span></a><a class="headerlink" href="#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.share_model_across_processes" title="Link to this definition"></a></dt> |
| <dd></dd></dl> |
| |
| <dl class="py method"> |
| <dt class="sig sig-object py" id="apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.model_copies"> |
| <span class="sig-name descname"><span class="pre">model_copies</span></span><span class="sig-paren">(</span><span class="sig-paren">)</span> <span class="sig-return"><span class="sig-return-icon">→</span> <span class="sig-return-typehint"><a class="reference external" href="https://docs.python.org/3/library/functions.html#int" title="(in Python v3.13)"><span class="pre">int</span></a></span></span><a class="reference internal" href="_modules/apache_beam/ml/inference/tensorflow_inference.html#TFModelHandlerNumpy.model_copies"><span class="viewcode-link"><span class="pre">[source]</span></span></a><a class="headerlink" href="#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerNumpy.model_copies" title="Link to this definition"></a></dt> |
| <dd></dd></dl> |
| |
| </dd></dl> |
| |
| <dl class="py class"> |
| <dt class="sig sig-object py" id="apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor"> |
| <em class="property"><span class="pre">class</span><span class="w"> </span></em><span class="sig-prename descclassname"><span class="pre">apache_beam.ml.inference.tensorflow_inference.</span></span><span class="sig-name descname"><span class="pre">TFModelHandlerTensor</span></span><span class="sig-paren">(</span><em class="sig-param"><span class="pre">model_uri:</span> <span class="pre">str,</span> <span class="pre">model_type:</span> <span class="pre">~apache_beam.ml.inference.tensorflow_inference.ModelType</span> <span class="pre">=</span> <span class="pre">ModelType.SAVED_MODEL,</span> <span class="pre">create_model_fn:</span> <span class="pre">~collections.abc.Callable</span> <span class="pre">|</span> <span class="pre">None</span> <span class="pre">=</span> <span class="pre">None,</span> <span class="pre">*,</span> <span class="pre">load_model_args:</span> <span class="pre">dict[str,</span> <span class="pre">~typing.Any]</span> <span class="pre">|</span> <span class="pre">None</span> <span class="pre">=</span> <span class="pre">None,</span> <span class="pre">custom_weights:</span> <span class="pre">str</span> <span class="pre">=</span> <span class="pre">'',</span> <span class="pre">inference_fn:</span> <span class="pre">~collections.abc.Callable[[tensorflow.Module,</span> <span class="pre">~collections.abc.Sequence[~numpy.ndarray</span> <span class="pre">|</span> <span class="pre">tensorflow.Tensor],</span> <span class="pre">dict[str,</span> <span class="pre">~typing.Any],</span> <span class="pre">str</span> <span class="pre">|</span> <span class="pre">None],</span> <span class="pre">~collections.abc.Iterable[~apache_beam.ml.inference.base.PredictionResult]]</span> <span class="pre">=</span> <span class="pre"><function</span> <span class="pre">default_tensor_inference_fn>,</span> <span class="pre">min_batch_size:</span> <span class="pre">int</span> <span class="pre">|</span> <span class="pre">None</span> <span class="pre">=</span> <span class="pre">None,</span> <span class="pre">max_batch_size:</span> <span class="pre">int</span> <span class="pre">|</span> <span class="pre">None</span> <span class="pre">=</span> <span class="pre">None,</span> <span class="pre">max_batch_duration_secs:</span> <span class="pre">int</span> <span class="pre">|</span> <span class="pre">None</span> <span class="pre">=</span> <span class="pre">None,</span> <span class="pre">large_model:</span> <span class="pre">bool</span> <span class="pre">=</span> <span class="pre">False,</span> <span class="pre">model_copies:</span> <span class="pre">int</span> <span class="pre">|</span> <span class="pre">None</span> <span class="pre">=</span> <span class="pre">None,</span> <span class="pre">**kwargs</span></em><span class="sig-paren">)</span><a class="reference internal" href="_modules/apache_beam/ml/inference/tensorflow_inference.html#TFModelHandlerTensor"><span class="viewcode-link"><span class="pre">[source]</span></span></a><a class="headerlink" href="#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor" title="Link to this definition"></a></dt> |
| <dd><p>Bases: <a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelHandler" title="apache_beam.ml.inference.base.ModelHandler"><code class="xref py py-class docutils literal notranslate"><span class="pre">ModelHandler</span></code></a>[<code class="xref py py-class docutils literal notranslate"><span class="pre">Tensor</span></code>, <a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.PredictionResult" title="apache_beam.ml.inference.base.PredictionResult"><code class="xref py py-class docutils literal notranslate"><span class="pre">PredictionResult</span></code></a>, <code class="xref py py-class docutils literal notranslate"><span class="pre">Module</span></code>]</p> |
<p>Implementation of the ModelHandler interface for TensorFlow, operating on batches of tf.Tensors.</p>
| <p>Example Usage:</p> |
| <div class="highlight-default notranslate"><div class="highlight"><pre><span></span><span class="n">pcoll</span> <span class="o">|</span> <span class="n">RunInference</span><span class="p">(</span><span class="n">TFModelHandlerTensor</span><span class="p">(</span><span class="n">model_uri</span><span class="o">=</span><span class="s2">"my_uri"</span><span class="p">))</span> |
| </pre></div> |
| </div> |
| <p>See <a class="reference external" href="https://www.tensorflow.org/tutorials/keras/save_and_load">https://www.tensorflow.org/tutorials/keras/save_and_load</a> for details.</p> |
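<p>A minimal pipeline sketch in which the elements are converted to <cite>tf.Tensor</cite>
values before inference; the model path and inputs are illustrative:</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>import tensorflow as tf
import apache_beam as beam
from apache_beam.ml.inference.base import RunInference
from apache_beam.ml.inference.tensorflow_inference import TFModelHandlerTensor

# Hypothetical SavedModel path; replace with your own trained model.
model_handler = TFModelHandlerTensor(model_uri="gs://my-bucket/saved_model")

with beam.Pipeline() as p:
    (p
     | beam.Create([[1.0, 2.0], [3.0, 4.0]])
     | beam.Map(lambda row: tf.constant(row))
     | RunInference(model_handler)
     | beam.Map(print))
</pre></div>
</div>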
| <dl class="field-list simple"> |
| <dt class="field-odd">Parameters<span class="colon">:</span></dt> |
| <dd class="field-odd"><ul class="simple"> |
| <li><p><strong>model_uri</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.13)"><em>str</em></a>) – path to the trained model.</p></li> |
| <li><p><strong>model_type</strong> – type of model to be loaded. |
| Defaults to SAVED_MODEL.</p></li> |
<li><p><strong>create_model_fn</strong> – a function that creates and returns a new
TensorFlow model into which the saved weights are loaded.
It should be used with ModelType.SAVED_WEIGHTS.</p></li>
| <li><p><strong>load_model_args</strong> – a dictionary of parameters to pass to the load_model |
| function of TensorFlow to specify custom config.</p></li> |
| <li><p><strong>custom_weights</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.13)"><em>str</em></a>) – path to the custom weights to be applied |
| once the model is loaded.</p></li> |
<li><p><strong>inference_fn</strong> – inference function to use during RunInference.
Defaults to default_tensor_inference_fn.</p></li>
<li><p><strong>min_batch_size</strong> – the minimum batch size to use when batching inputs.</p></li>
<li><p><strong>max_batch_size</strong> – the maximum batch size to use when batching inputs.</p></li>
<li><p><strong>max_batch_duration_secs</strong> – the maximum amount of time to buffer a batch
before emitting; used in streaming contexts (see the configuration sketch
after this parameter list).</p></li>
| <li><p><strong>large_model</strong> – set to true if your model is large enough to run into |
| memory pressure if you load multiple copies. Given a model that |
| consumes N memory and a machine with W cores and M memory, you should |
| set this to True if N*W > M.</p></li> |
| <li><p><strong>model_copies</strong> – The exact number of models that you would like loaded |
| onto your machine. This can be useful if you exactly know your CPU or |
| GPU capacity and want to maximize resource utilization.</p></li> |
| <li><p><strong>kwargs</strong> – ‘env_vars’ can be used to set environment variables |
| before loading the model.</p></li> |
| </ul> |
| </dd> |
| </dl> |
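<p>A configuration sketch for the batching and model-sharing options above; the
path and all values are illustrative:</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from apache_beam.ml.inference.tensorflow_inference import TFModelHandlerTensor

model_handler = TFModelHandlerTensor(
    model_uri="gs://my-bucket/saved_model",  # hypothetical path
    min_batch_size=8,
    max_batch_size=64,
    max_batch_duration_secs=1,
    large_model=True)  # load a single shared copy to reduce memory pressure
</pre></div>
</div>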
<p><strong>Supported Versions:</strong> RunInference APIs in Apache Beam have been tested
with TensorFlow 2.11.</p>
| <dl class="py method"> |
| <dt class="sig sig-object py" id="apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.load_model"> |
| <span class="sig-name descname"><span class="pre">load_model</span></span><span class="sig-paren">(</span><span class="sig-paren">)</span> <span class="sig-return"><span class="sig-return-icon">→</span> <span class="sig-return-typehint"><span class="pre">tensorflow.Module</span></span></span><a class="reference internal" href="_modules/apache_beam/ml/inference/tensorflow_inference.html#TFModelHandlerTensor.load_model"><span class="viewcode-link"><span class="pre">[source]</span></span></a><a class="headerlink" href="#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.load_model" title="Link to this definition"></a></dt> |
<dd><p>Loads and initializes a TensorFlow model for processing.</p>
| </dd></dl> |
| |
| <dl class="py method"> |
| <dt class="sig sig-object py" id="apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.update_model_path"> |
| <span class="sig-name descname"><span class="pre">update_model_path</span></span><span class="sig-paren">(</span><em class="sig-param"><span class="n"><span class="pre">model_path</span></span><span class="p"><span class="pre">:</span></span><span class="w"> </span><span class="n"><a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.13)"><span class="pre">str</span></a><span class="w"> </span><span class="p"><span class="pre">|</span></span><span class="w"> </span><a class="reference external" href="https://docs.python.org/3/library/constants.html#None" title="(in Python v3.13)"><span class="pre">None</span></a></span><span class="w"> </span><span class="o"><span class="pre">=</span></span><span class="w"> </span><span class="default_value"><span class="pre">None</span></span></em><span class="sig-paren">)</span><a class="reference internal" href="_modules/apache_beam/ml/inference/tensorflow_inference.html#TFModelHandlerTensor.update_model_path"><span class="viewcode-link"><span class="pre">[source]</span></span></a><a class="headerlink" href="#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.update_model_path" title="Link to this definition"></a></dt> |
| <dd></dd></dl> |
| |
| <dl class="py method"> |
| <dt class="sig sig-object py" id="apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.run_inference"> |
| <span class="sig-name descname"><span class="pre">run_inference</span></span><span class="sig-paren">(</span><em class="sig-param"><span class="n"><span class="pre">batch</span></span><span class="p"><span class="pre">:</span></span><span class="w"> </span><span class="n"><a class="reference external" href="https://docs.python.org/3/library/collections.abc.html#collections.abc.Sequence" title="(in Python v3.13)"><span class="pre">Sequence</span></a><span class="p"><span class="pre">[</span></span><span class="pre">tensorflow.Tensor</span><span class="p"><span class="pre">]</span></span></span></em>, <em class="sig-param"><span class="n"><span class="pre">model</span></span><span class="p"><span class="pre">:</span></span><span class="w"> </span><span class="n"><span class="pre">tensorflow.Module</span></span></em>, <em class="sig-param"><span class="n"><span class="pre">inference_args</span></span><span class="p"><span class="pre">:</span></span><span class="w"> </span><span class="n"><a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.13)"><span class="pre">dict</span></a><span class="p"><span class="pre">[</span></span><a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.13)"><span class="pre">str</span></a><span class="p"><span class="pre">,</span></span><span class="w"> </span><a class="reference external" href="https://docs.python.org/3/library/typing.html#typing.Any" title="(in Python v3.13)"><span class="pre">Any</span></a><span class="p"><span class="pre">]</span></span><span class="w"> </span><span class="p"><span class="pre">|</span></span><span class="w"> </span><a class="reference external" href="https://docs.python.org/3/library/constants.html#None" title="(in Python v3.13)"><span class="pre">None</span></a></span><span class="w"> </span><span class="o"><span class="pre">=</span></span><span class="w"> </span><span class="default_value"><span class="pre">None</span></span></em><span class="sig-paren">)</span> <span class="sig-return"><span class="sig-return-icon">→</span> <span class="sig-return-typehint"><a class="reference external" href="https://docs.python.org/3/library/collections.abc.html#collections.abc.Iterable" title="(in Python v3.13)"><span class="pre">Iterable</span></a><span class="p"><span class="pre">[</span></span><a class="reference internal" href="apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.PredictionResult" title="apache_beam.ml.inference.base.PredictionResult"><span class="pre">PredictionResult</span></a><span class="p"><span class="pre">]</span></span></span></span><a class="reference internal" href="_modules/apache_beam/ml/inference/tensorflow_inference.html#TFModelHandlerTensor.run_inference"><span class="viewcode-link"><span class="pre">[source]</span></span></a><a class="headerlink" href="#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.run_inference" title="Link to this definition"></a></dt> |
<dd><p>Runs inferences on a batch of tf.Tensors and returns an Iterable of
Tensor predictions.</p>
| <p>This method stacks the list of Tensors in a vectorized format to optimize |
| the inference call.</p> |
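<p>For illustration, the batch is combined roughly as follows before the model is
invoked (a simplified sketch, not the handler’s exact code path):</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>import tensorflow as tf

# Two batchable Tensors with identical shapes...
batch = [tf.constant([1.0, 2.0]), tf.constant([3.0, 4.0])]
# ...are stacked into a single (batch_size, n_features) Tensor.
vectorized = tf.stack(batch)  # shape (2, 2)
</pre></div>
</div>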
| <dl class="field-list simple"> |
| <dt class="field-odd">Parameters<span class="colon">:</span></dt> |
| <dd class="field-odd"><ul class="simple"> |
| <li><p><strong>batch</strong> – A sequence of Tensors. These Tensors should be batchable, as this |
| method will call <cite>tf.stack()</cite> and pass in batched Tensors with |
| dimensions (batch_size, n_features, etc.) into the model’s predict() |
| function.</p></li> |
<li><p><strong>model</strong> – A TensorFlow model.</p></li>
<li><p><strong>inference_args</strong> – Non-batchable arguments required as inputs to the model’s
inference call. Unlike Tensors in <cite>batch</cite>, these parameters will
not be dynamically batched.</p></li>
| </ul> |
| </dd> |
| <dt class="field-even">Returns<span class="colon">:</span></dt> |
| <dd class="field-even"><p>An Iterable of type PredictionResult.</p> |
| </dd> |
| </dl> |
| </dd></dl> |
| |
| <dl class="py method"> |
| <dt class="sig sig-object py" id="apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.get_num_bytes"> |
| <span class="sig-name descname"><span class="pre">get_num_bytes</span></span><span class="sig-paren">(</span><em class="sig-param"><span class="n"><span class="pre">batch</span></span><span class="p"><span class="pre">:</span></span><span class="w"> </span><span class="n"><a class="reference external" href="https://docs.python.org/3/library/collections.abc.html#collections.abc.Sequence" title="(in Python v3.13)"><span class="pre">Sequence</span></a><span class="p"><span class="pre">[</span></span><span class="pre">tensorflow.Tensor</span><span class="p"><span class="pre">]</span></span></span></em><span class="sig-paren">)</span> <span class="sig-return"><span class="sig-return-icon">→</span> <span class="sig-return-typehint"><a class="reference external" href="https://docs.python.org/3/library/functions.html#int" title="(in Python v3.13)"><span class="pre">int</span></a></span></span><a class="reference internal" href="_modules/apache_beam/ml/inference/tensorflow_inference.html#TFModelHandlerTensor.get_num_bytes"><span class="viewcode-link"><span class="pre">[source]</span></span></a><a class="headerlink" href="#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.get_num_bytes" title="Link to this definition"></a></dt> |
| <dd><dl class="field-list simple"> |
| <dt class="field-odd">Returns<span class="colon">:</span></dt> |
| <dd class="field-odd"><p>The number of bytes of data for a batch of Tensors.</p> |
| </dd> |
| </dl> |
| </dd></dl> |
| |
| <dl class="py method"> |
| <dt class="sig sig-object py" id="apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.get_metrics_namespace"> |
| <span class="sig-name descname"><span class="pre">get_metrics_namespace</span></span><span class="sig-paren">(</span><span class="sig-paren">)</span> <span class="sig-return"><span class="sig-return-icon">→</span> <span class="sig-return-typehint"><a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.13)"><span class="pre">str</span></a></span></span><a class="reference internal" href="_modules/apache_beam/ml/inference/tensorflow_inference.html#TFModelHandlerTensor.get_metrics_namespace"><span class="viewcode-link"><span class="pre">[source]</span></span></a><a class="headerlink" href="#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.get_metrics_namespace" title="Link to this definition"></a></dt> |
| <dd><dl class="field-list simple"> |
| <dt class="field-odd">Returns<span class="colon">:</span></dt> |
| <dd class="field-odd"><p>A namespace for metrics collected by the RunInference transform.</p> |
| </dd> |
| </dl> |
| </dd></dl> |
| |
| <dl class="py method"> |
| <dt class="sig sig-object py" id="apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.validate_inference_args"> |
| <span class="sig-name descname"><span class="pre">validate_inference_args</span></span><span class="sig-paren">(</span><em class="sig-param"><span class="n"><span class="pre">inference_args</span></span><span class="p"><span class="pre">:</span></span><span class="w"> </span><span class="n"><a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.13)"><span class="pre">dict</span></a><span class="p"><span class="pre">[</span></span><a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.13)"><span class="pre">str</span></a><span class="p"><span class="pre">,</span></span><span class="w"> </span><a class="reference external" href="https://docs.python.org/3/library/typing.html#typing.Any" title="(in Python v3.13)"><span class="pre">Any</span></a><span class="p"><span class="pre">]</span></span><span class="w"> </span><span class="p"><span class="pre">|</span></span><span class="w"> </span><a class="reference external" href="https://docs.python.org/3/library/constants.html#None" title="(in Python v3.13)"><span class="pre">None</span></a></span></em><span class="sig-paren">)</span><a class="reference internal" href="_modules/apache_beam/ml/inference/tensorflow_inference.html#TFModelHandlerTensor.validate_inference_args"><span class="viewcode-link"><span class="pre">[source]</span></span></a><a class="headerlink" href="#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.validate_inference_args" title="Link to this definition"></a></dt> |
| <dd></dd></dl> |
| |
| <dl class="py method"> |
| <dt class="sig sig-object py" id="apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.batch_elements_kwargs"> |
| <span class="sig-name descname"><span class="pre">batch_elements_kwargs</span></span><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="reference internal" href="_modules/apache_beam/ml/inference/tensorflow_inference.html#TFModelHandlerTensor.batch_elements_kwargs"><span class="viewcode-link"><span class="pre">[source]</span></span></a><a class="headerlink" href="#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.batch_elements_kwargs" title="Link to this definition"></a></dt> |
| <dd></dd></dl> |
| |
| <dl class="py method"> |
| <dt class="sig sig-object py" id="apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.share_model_across_processes"> |
| <span class="sig-name descname"><span class="pre">share_model_across_processes</span></span><span class="sig-paren">(</span><span class="sig-paren">)</span> <span class="sig-return"><span class="sig-return-icon">→</span> <span class="sig-return-typehint"><a class="reference external" href="https://docs.python.org/3/library/functions.html#bool" title="(in Python v3.13)"><span class="pre">bool</span></a></span></span><a class="reference internal" href="_modules/apache_beam/ml/inference/tensorflow_inference.html#TFModelHandlerTensor.share_model_across_processes"><span class="viewcode-link"><span class="pre">[source]</span></span></a><a class="headerlink" href="#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.share_model_across_processes" title="Link to this definition"></a></dt> |
| <dd></dd></dl> |
| |
| <dl class="py method"> |
| <dt class="sig sig-object py" id="apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.model_copies"> |
| <span class="sig-name descname"><span class="pre">model_copies</span></span><span class="sig-paren">(</span><span class="sig-paren">)</span> <span class="sig-return"><span class="sig-return-icon">→</span> <span class="sig-return-typehint"><a class="reference external" href="https://docs.python.org/3/library/functions.html#int" title="(in Python v3.13)"><span class="pre">int</span></a></span></span><a class="reference internal" href="_modules/apache_beam/ml/inference/tensorflow_inference.html#TFModelHandlerTensor.model_copies"><span class="viewcode-link"><span class="pre">[source]</span></span></a><a class="headerlink" href="#apache_beam.ml.inference.tensorflow_inference.TFModelHandlerTensor.model_copies" title="Link to this definition"></a></dt> |
| <dd></dd></dl> |
| |
| </dd></dl> |
| |
| </section> |
| |
| |
| </div> |
| </div> |
| <footer><div class="rst-footer-buttons" role="navigation" aria-label="Footer"> |
| <a href="apache_beam.ml.inference.sklearn_inference.html" class="btn btn-neutral float-left" title="apache_beam.ml.inference.sklearn_inference module" accesskey="p" rel="prev"><span class="fa fa-arrow-circle-left" aria-hidden="true"></span> Previous</a> |
| <a href="apache_beam.ml.inference.tensorrt_inference.html" class="btn btn-neutral float-right" title="apache_beam.ml.inference.tensorrt_inference module" accesskey="n" rel="next">Next <span class="fa fa-arrow-circle-right" aria-hidden="true"></span></a> |
| </div> |
| |
| <hr/> |
| |
| <div role="contentinfo"> |
<p>© Copyright, Apache Beam.</p>
| </div> |
| |
| Built with <a href="https://www.sphinx-doc.org/">Sphinx</a> using a |
| <a href="https://github.com/readthedocs/sphinx_rtd_theme">theme</a> |
| provided by <a href="https://readthedocs.org">Read the Docs</a>. |
| |
| |
| </footer> |
| </div> |
| </div> |
| </section> |
| </div> |
| <script> |
| jQuery(function () { |
| SphinxRtdTheme.Navigation.enable(true); |
| }); |
| </script> |
| |
| </body> |
| </html> |