---
title: Docker Images for TensorFlow
---

## How to create docker images to run Tensorflow on YARN

A Dockerfile to run Tensorflow on YARN needs two parts:

**Base libraries which Tensorflow depends on**

1) OS base image, for example `ubuntu:18.04`

2) Libraries and packages that Tensorflow depends on, for example `python` and `scipy`.
For GPU support, `cuda`, `cudnn`, etc. are also needed.

3) Tensorflow package.

**Libraries to access HDFS**

1) JDK

2) Hadoop

Here's an example of a base image (without GPU support) that installs Tensorflow:

```shell
FROM ubuntu:18.04

# Pick up some TF dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
 build-essential \
 curl \
 libfreetype6-dev \
 libpng-dev \
 libzmq3-dev \
 pkg-config \
 python \
 python-dev \
 rsync \
 software-properties-common \
 unzip \
 && \
 apt-get clean && \
 rm -rf /var/lib/apt/lists/*

RUN export DEBIAN_FRONTEND=noninteractive && apt-get update && apt-get install -yq krb5-user libpam-krb5 && apt-get clean

RUN curl -O https://bootstrap.pypa.io/get-pip.py && \
 python get-pip.py && \
 rm get-pip.py

RUN pip --no-cache-dir install \
 Pillow \
 h5py \
 ipykernel \
 jupyter \
 matplotlib \
 numpy \
 pandas \
 scipy \
 sklearn \
 && \
 python -m ipykernel.kernelspec

RUN pip --no-cache-dir install \
 http://storage.googleapis.com/tensorflow/linux/cpu/tensorflow-1.13.1-cp27-none-linux_x86_64.whl
```

On top of the above image, add files and install the packages needed to access HDFS:

```shell
RUN apt-get update && apt-get install -y openjdk-8-jdk wget
# Install hadoop
ENV HADOOP_VERSION="2.9.2"
RUN wget http://mirrors.hust.edu.cn/apache/hadoop/common/hadoop-${HADOOP_VERSION}/hadoop-${HADOOP_VERSION}.tar.gz
RUN tar zxf hadoop-${HADOOP_VERSION}.tar.gz
RUN ln -s hadoop-${HADOOP_VERSION} hadoop-current
RUN rm hadoop-${HADOOP_VERSION}.tar.gz
```

Build and push the image to your own docker registry: use `docker build ...` and `docker push ...` to finish this step.
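For illustration, a minimal sketch of that step is shown below. The image tag `tf-1.13.1-on-yarn:0.0.1` and the registry host `registry.example.com` are placeholder assumptions, not names defined by Submarine; substitute your own.

```shell
# Build the image from the Dockerfile in the current directory.
# The tag is only an example.
docker build -t tf-1.13.1-on-yarn:0.0.1 .

# Tag the image for your own registry and push it so that
# YARN node managers can pull it when containers are launched.
docker tag tf-1.13.1-on-yarn:0.0.1 registry.example.com/tf-1.13.1-on-yarn:0.0.1
docker push registry.example.com/tf-1.13.1-on-yarn:0.0.1
```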
")," and ",Object(a.b)("inlineCode",{parentName:"p"},"docker push ...")," to finish this step."),Object(a.b)("h2",{id:"use-examples-to-build-your-own-tensorflow-docker-images"},"Use examples to build your own Tensorflow docker images"),Object(a.b)("p",null,"We provided following examples for you to build tensorflow docker images."),Object(a.b)("p",null,"For Tensorflow 1.13.1 (Precompiled to CUDA 10.x)"),Object(a.b)("ul",null,Object(a.b)("li",{parentName:"ul"},Object(a.b)("em",{parentName:"li"},"docker/tensorflow/base/ubuntu-18.04/Dockerfile.cpu.tf_1.13.1"),": Tensorflow 1.13.1 supports CPU only."),Object(a.b)("li",{parentName:"ul"},Object(a.b)("em",{parentName:"li"},"docker/tensorflow/with-cifar10-models/ubuntu-18.04/Dockerfile.cpu.tf_1.13.1"),": Tensorflow 1.13.1 supports CPU only, and included models"),Object(a.b)("li",{parentName:"ul"},Object(a.b)("em",{parentName:"li"},"docker/tensorflow/base/ubuntu-18.04/Dockerfile.gpu.tf_1.13.1"),": Tensorflow 1.13.1 supports GPU, which is prebuilt to CUDA10."),Object(a.b)("li",{parentName:"ul"},Object(a.b)("em",{parentName:"li"},"docker/tensorflow/with-cifar10-models/ubuntu-18.04/Dockerfile.gpu.tf_1.13.1"),": Tensorflow 1.13.1 supports GPU, which is prebuilt to CUDA10, with models.")),Object(a.b)("h2",{id:"build-docker-images"},"Build Docker images"),Object(a.b)("h3",{id:"manually-build-docker-image"},"Manually build Docker image:"),Object(a.b)("p",null,"Under ",Object(a.b)("inlineCode",{parentName:"p"},"docker/")," directory, run ",Object(a.b)("inlineCode",{parentName:"p"},"build-all.sh")," to build Docker images. It will build following images:"),Object(a.b)("ul",null,Object(a.b)("li",{parentName:"ul"},Object(a.b)("inlineCode",{parentName:"li"},"tf-1.13.1-gpu-base:0.0.1")," for base Docker image which includes Hadoop, Tensorflow, GPU base libraries."),Object(a.b)("li",{parentName:"ul"},Object(a.b)("inlineCode",{parentName:"li"},"tf-1.13.1-gpu-base:0.0.1")," for base Docker image which includes Hadoop. 
Tensorflow."),Object(a.b)("li",{parentName:"ul"},Object(a.b)("inlineCode",{parentName:"li"},"tf-1.13.1-gpu:0.0.1")," which includes cifar10 model"),Object(a.b)("li",{parentName:"ul"},Object(a.b)("inlineCode",{parentName:"li"},"tf-1.13.1-cpu:0.0.1")," which inclues cifar10 model (cpu only).")),Object(a.b)("h3",{id:"use-prebuilt-images"},"Use prebuilt images"),Object(a.b)("p",null,"(No liability)\nYou can also use prebuilt images for convenience:"),Object(a.b)("ul",null,Object(a.b)("li",{parentName:"ul"},"hadoopsubmarine/tf-1.13.1-gpu:0.0.1"),Object(a.b)("li",{parentName:"ul"},"hadoopsubmarine/tf-1.13.1-cpu:0.0.1")))}u.isMDXComponent=!0},144:function(e,n,t){"use strict";t.d(n,"a",(function(){return s})),t.d(n,"b",(function(){return m}));var r=t(0),o=t.n(r);function a(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function l(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);n&&(r=r.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,r)}return t}function i(e){for(var n=1;n<arguments.length;n++){var t=null!=arguments[n]?arguments[n]:{};n%2?l(Object(t),!0).forEach((function(n){a(e,n,t[n])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):l(Object(t)).forEach((function(n){Object.defineProperty(e,n,Object.getOwnPropertyDescriptor(t,n))}))}return e}function c(e,n){if(null==e)return{};var t,r,o=function(e,n){if(null==e)return{};var t,r,o={},a=Object.keys(e);for(r=0;r<a.length;r++)t=a[r],n.indexOf(t)>=0||(o[t]=e[t]);return o}(e,n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r<a.length;r++)t=a[r],n.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var p=o.a.createContext({}),u=function(e){var n=o.a.useContext(p),t=n;return e&&(t="function"==typeof e?e(n):i(i({},n),e)),t},s=function(e){var n=u(e.components);return o.a.createElement(p.Provider,{value:n},e.children)},b={inlineCode:"code",wrapper:function(e){var n=e.children;return o.a.createElement(o.a.Fragment,{},n)}},d=o.a.forwardRef((function(e,n){var t=e.components,r=e.mdxType,a=e.originalType,l=e.parentName,p=c(e,["components","mdxType","originalType","parentName"]),s=u(t),d=r,m=s["".concat(l,".").concat(d)]||s[d]||b[d]||a;return t?o.a.createElement(m,i(i({ref:n},p),{},{components:t})):o.a.createElement(m,i({ref:n},p))}));function m(e,n){var t=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var a=t.length,l=new Array(a);l[0]=d;var i={};for(var c in n)hasOwnProperty.call(n,c)&&(i[c]=n[c]);i.originalType=e,i.mdxType="string"==typeof e?e:r,l[1]=i;for(var p=2;p<a;p++)l[p]=t[p];return o.a.createElement.apply(null,l)}return o.a.createElement.apply(null,t)}d.displayName="MDXCreateElement"}}]);