# Inference Task with TVM in Teaclave

Inference and model training are two important topics in machine learning.
Thanks to [TVM](https://tvm.apache.org/) and the [WebAssembly
Executor](https://teaclave.apache.org/docs/executing-wasm/), Teaclave is now
able to run the former: inference tasks. TVM converts a model (or computation
graph) into its own intermediate representation (IR) and compiles a binary of
the model from that IR. Since TVM uses LLVM to emit binary code and LLVM
supports WebAssembly as a backend, Teaclave's WebAssembly Executor can execute
the model's binary together with a lightweight runtime provided by TVM.

Although TVM already provides a [wasm-standalone example
app](https://github.com/apache/tvm/tree/main/apps/wasm-standalone), we cannot
simply copy it and run it in Teaclave, because Teaclave lacks WASI support and
uses its own context-file interface. This document therefore focuses on *what's
different* in Teaclave, and we will finally build an MNIST inference function
for Teaclave.

## Preparing TVM and Dependencies

All the dependencies have been installed or built in our docker image. If you
do not want to spend time on this step, you can skip this section and use [our
image](https://hub.docker.com/layers/teaclave/teaclave-build-ubuntu-1804-sgx-2.17.1/0.1.0/images/sha256-fd6c0e8a0bac53327408091ac8e11f911711477747116ecb4f3bf1094fb24c17)
directly.

TVM provides detailed build instructions in [its
documentation](https://tvm.apache.org/docs/install/from_source.html). Besides
the dependencies listed on their website, we also need to install the following
packages (e.g. on Ubuntu 18.04) to build the example.

```sh
sudo apt install protobuf-compiler libprotoc-dev llvm-10 clang-10
pip3 install onnx==1.9.0 numpy decorator attrs scipy
```

::: tip NOTE
At the time of writing this document, the latest `onnx` does not work because
it depends on a newer version of `protobuf` than Ubuntu 18.04 provides. We
tested TVM at commit `df06c5848f59108a8e6e7dffb997b4b659b573a7`. Later commits
may work, but commits older than this one hardly work.
:::

## Compiling WASM Library

TVM offers a set of Python APIs for downloading, building, and testing the
model. Specifically, to compile a graph into a binary, we need to:

1. Download the model
2. Determine the name and shape of the input
3. Generate the TVM IR module
4. Compile (build) for the LLVM WebAssembly target
5. Save the object, graph, and param files
6. Archive the object (with `llvm-ar`) into a static library

After completing these steps, we have a static library with the `PackedFunc`
exported for the inference task.

The complete example build script can be found
[here](https://github.com/apache/incubator-teaclave/blob/master/examples/python/wasm_tvm_mnist_payload/build_lib.py).
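For orientation, the following is a condensed sketch of those six steps using
TVM's Python API. The model URL, the input name `Input3`, the input shape, and
the exact target string are assumptions (they follow the ONNX model zoo MNIST
model and TVM's wasm-standalone example rather than Teaclave's script), so
treat the linked `build_lib.py` as the authoritative version.

```python
# build_lib_sketch.py: a condensed version of the six steps above.
# The model URL, input name/shape, and target string are assumptions;
# see the linked build_lib.py for the values the example actually uses.
import os
import subprocess

import onnx
import tvm
from tvm import relay, runtime
from tvm.contrib.download import download_testdata

# 1. Download the model (here: the ONNX model zoo MNIST model).
model_url = ("https://github.com/onnx/models/raw/main/"
             "vision/classification/mnist/model/mnist-8.onnx")
model_path = download_testdata(model_url, "mnist-8.onnx", module="onnx")
onnx_model = onnx.load(model_path)

# 2. Determine the name and shape of the input tensor.
input_name = "Input3"
shape_dict = {input_name: (1, 1, 28, 28)}

# 3. Generate the TVM IR module from the ONNX graph.
mod, params = relay.frontend.from_onnx(onnx_model, shape_dict)

# 4. Compile (build) for the LLVM WebAssembly target as a system library,
#    so the Rust wrapper in the next section can link it statically.
target = "llvm -mtriple=wasm32-unknown-unknown --system-lib"
with tvm.transform.PassContext(opt_level=3):
    factory = relay.build(mod, target=target, params=params)

# 5. Save the object, graph, and param files.
out_dir = "lib"
os.makedirs(out_dir, exist_ok=True)
obj_file = os.path.join(out_dir, "graph.o")
factory.get_lib().save(obj_file)
with open(os.path.join(out_dir, "graph.json"), "w") as f_graph:
    f_graph.write(factory.get_graph_json())
with open(os.path.join(out_dir, "graph.params"), "wb") as f_params:
    f_params.write(runtime.save_param_dict(factory.get_params()))

# 6. Archive the object with llvm-ar into a static library.
subprocess.run(
    ["llvm-ar-10", "rcs", os.path.join(out_dir, "libgraph_wasm32.a"), obj_file],
    check=True)
```

The `--system-lib` part of the target is what allows the wrapper to link the
compiled graph as a static library and look up its functions at runtime.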
## Bridging with Teaclave

Although the library is in WebAssembly, we cannot use it directly in Teaclave:
it has no parameters bundled, and its interface is not compatible with
Teaclave's. We therefore need a wrapper program that contains a small runtime
for the compiled computation graph. This wrapper should:

- Load the model parameters and the graph JSON
- Link against the graph library generated in the previous section
- Export an entrypoint that is compatible with Teaclave's interface
- Read the input data (an image) using Teaclave's API and convert it to a tensor
- Call the graph function and get the result back

Our wrapper builds on TVM's Rust APIs and uses `GraphExecutor` to call into the
graph library. The detailed mechanism is explained in [TVM's
example](https://github.com/apache/tvm/tree/main/apps/wasm-standalone). Our
example can be found
[here](https://github.com/apache/incubator-teaclave/tree/master/examples/python/wasm_tvm_mnist_payload).

::: tip NOTE
To compile a Teaclave-compatible WASM binary, please make sure your Rust
version is newer than 1.53. We tested on 1.54 stable.
:::
## Execute the Function

Just like any other Teaclave function, users need to prepare a simple Python
script that passes the function and data to Teaclave and then fetches the
result back. The script for this example is
[here](https://github.com/apache/incubator-teaclave/blob/master/examples/python/wasm_tvm_mnist.py).
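For orientation, a heavily simplified sketch of such a script is shown below.
It assumes the client API used throughout Teaclave's Python examples
(`register_function`, `create_task`, `invoke_task`, `get_task_result`) and the
`wamr` executor; the connection and login boilerplate is hidden behind a
hypothetical `connect_frontend_service()` helper, and the registration of the
encrypted input image is omitted, so the linked `wasm_tvm_mnist.py` remains the
authoritative version.

```python
# A simplified sketch of the invocation flow, not the actual example script.
# connect_frontend_service() is a placeholder for the authentication and
# connection boilerplate in the examples' utility code; the payload path,
# argument names, and input-image handling are assumptions.
from utils import connect_frontend_service  # hypothetical helper


def run_mnist_inference(wasm_path="wasm_tvm_mnist_payload.wasm"):
    # The compiled wrapper (graph library + Rust runtime) is the payload.
    with open(wasm_path, "rb") as f:
        payload = f.read()

    with connect_frontend_service() as client:
        # Register the WASM payload for Teaclave's WebAssembly executor.
        function_id = client.register_function(
            name="wasm_tvm_mnist",
            description="MNIST inference with a TVM-compiled graph",
            executor_type="wamr",
            payload=list(payload),
            arguments=["input_img"])

        # Create and invoke the task.  Registering and assigning the
        # encrypted input image file is omitted in this sketch.
        task_id = client.create_task(
            function_id=function_id,
            function_arguments={"input_img": "img.json"},
            executor="wamr")
        client.invoke_task(task_id)

        # Fetch the inference result (the predicted digit) back.
        return bytes(client.get_task_result(task_id))
```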