"use strict";(self.webpackChunkwebsite=self.webpackChunkwebsite||[]).push([[7103],{3905:function(e,n,r){r.d(n,{Zo:function(){return p},kt:function(){return m}});var t=r(7294);function a(e,n,r){return n in e?Object.defineProperty(e,n,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[n]=r,e}function i(e,n){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),r.push.apply(r,t)}return r}function o(e){for(var n=1;n<arguments.length;n++){var r=null!=arguments[n]?arguments[n]:{};n%2?i(Object(r),!0).forEach((function(n){a(e,n,r[n])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):i(Object(r)).forEach((function(n){Object.defineProperty(e,n,Object.getOwnPropertyDescriptor(r,n))}))}return e}function s(e,n){if(null==e)return{};var r,t,a=function(e,n){if(null==e)return{};var r,t,a={},i=Object.keys(e);for(t=0;t<i.length;t++)r=i[t],n.indexOf(r)>=0||(a[r]=e[r]);return a}(e,n);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(t=0;t<i.length;t++)r=i[t],n.indexOf(r)>=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var u=t.createContext({}),l=function(e){var n=t.useContext(u),r=n;return e&&(r="function"==typeof e?e(n):o(o({},n),e)),r},p=function(e){var n=l(e.components);return t.createElement(u.Provider,{value:n},e.children)},c={inlineCode:"code",wrapper:function(e){var n=e.children;return t.createElement(t.Fragment,{},n)}},d=t.forwardRef((function(e,n){var r=e.components,a=e.mdxType,i=e.originalType,u=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),d=l(r),m=a,g=d["".concat(u,".").concat(m)]||d[m]||c[m]||i;return r?t.createElement(g,o(o({ref:n},p),{},{components:r})):t.createElement(g,o({ref:n},p))}));function m(e,n){var r=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var i=r.length,o=new Array(i);o[0]=d;var s={};for(var u in n)hasOwnProperty.call(n,u)&&(s[u]=n[u]);s.originalType=e,s.mdxType="string"==typeof e?e:a,o[1]=s;for(var l=2;l<i;l++)o[l]=r[l];return t.createElement.apply(null,o)}return t.createElement.apply(null,r)}d.displayName="MDXCreateElement"},7454:function(e,n,r){r.r(n),r.d(n,{assets:function(){return p},contentTitle:function(){return u},default:function(){return m},frontMatter:function(){return s},metadata:function(){return l},toc:function(){return c}});var t=r(7462),a=r(3366),i=(r(7294),r(3905)),o=["components"],s={title:"Submarine Spark Security Plugin"},u=void 0,l={unversionedId:"userDocs/submarine-security/spark-security/README",id:"version-0.6.0/userDocs/submarine-security/spark-security/README",title:"Submarine Spark Security Plugin",description:"\x3c!--",source:"@site/versioned_docs/version-0.6.0/userDocs/submarine-security/spark-security/README.md",sourceDirName:"userDocs/submarine-security/spark-security",slug:"/userDocs/submarine-security/spark-security/",permalink:"/docs/0.6.0/userDocs/submarine-security/spark-security/",editUrl:"https://github.com/apache/submarine/edit/master/website/versioned_docs/version-0.6.0/userDocs/submarine-security/spark-security/README.md",tags:[],version:"0.6.0",frontMatter:{title:"Submarine Spark Security Plugin"},sidebar:"docs",previous:{title:"Tracking",permalink:"/docs/0.6.0/userDocs/submarine-sdk/tracking"},next:{title:"Building Submarine Spark Security 
Plugin",permalink:"/docs/0.6.0/userDocs/submarine-security/spark-security/build-submarine-spark-security-plugin"}},p={},c=[{value:"Build",id:"build",level:2},{value:"Quick Start",id:"quick-start",level:2},{value:"Installation",id:"installation",level:3},{value:"Configurations",id:"configurations",level:3},{value:"Settings for Apache Ranger",id:"settings-for-apache-ranger",level:4},{value:"Settings for Apache Spark",id:"settings-for-apache-spark",level:4}],d={toc:c};function m(e){var n=e.components,r=(0,a.Z)(e,o);return(0,i.kt)("wrapper",(0,t.Z)({},d,r,{components:n,mdxType:"MDXLayout"}),(0,i.kt)("p",null,"ACL Management for Apache Spark SQL with Apache Ranger, enabling:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"Table/Column level authorization"),(0,i.kt)("li",{parentName:"ul"},"Row level filtering"),(0,i.kt)("li",{parentName:"ul"},"Data masking")),(0,i.kt)("p",null,"Security is one of fundamental features for enterprise adoption. ",(0,i.kt)("a",{parentName:"p",href:"https://ranger.apache.org"},"Apache Ranger\u2122")," offers many security plugins for many Hadoop ecosystem components,\nsuch as HDFS, Hive, HBase, Solr and Sqoop2. However, ",(0,i.kt)("a",{parentName:"p",href:"http://spark.apache.org"},"Apache Spark\u2122")," is not counted in yet.\nWhen a secured HDFS cluster is used as a data warehouse accessed by various users and groups via different applications wrote by Spark and Hive,\nit is very difficult to guarantee data management in a consistent way. Apache Spark users visit data warehouse only\nwith Storage based access controls offered by HDFS. This library enables Spark with SQL Standard Based Authorization. "),(0,i.kt)("h2",{id:"build"},"Build"),(0,i.kt)("p",null,"Please refer to the online documentation - ",(0,i.kt)("a",{parentName:"p",href:"/docs/0.6.0/userDocs/submarine-security/spark-security/build-submarine-spark-security-plugin"},"Building submarine spark security plguin")),(0,i.kt)("h2",{id:"quick-start"},"Quick Start"),(0,i.kt)("p",null,"Three steps to integrate Apache Spark and Apache Ranger."),(0,i.kt)("h3",{id:"installation"},"Installation"),(0,i.kt)("p",null,"Place the submarine-spark-security-","<","version",">",".jar into ",(0,i.kt)("inlineCode",{parentName:"p"},"$SPARK_HOME/jars"),"."),(0,i.kt)("h3",{id:"configurations"},"Configurations"),(0,i.kt)("h4",{id:"settings-for-apache-ranger"},"Settings for Apache Ranger"),(0,i.kt)("p",null,"Create ",(0,i.kt)("inlineCode",{parentName:"p"},"ranger-spark-security.xml")," in ",(0,i.kt)("inlineCode",{parentName:"p"},"$SPARK_HOME/conf")," and add the following configurations\nfor pointing to the right Apache Ranger admin server."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-xml"},"\n<configuration>\n\n <property>\n <name>ranger.plugin.spark.policy.rest.url</name>\n <value>ranger admin address like http://ranger-admin.org:6080</value>\n </property>\n\n <property>\n <name>ranger.plugin.spark.service.name</name>\n <value>a ranger hive service name</value>\n </property>\n\n <property>\n <name>ranger.plugin.spark.policy.cache.dir</name>\n <value>./a ranger hive service name/policycache</value>\n </property>\n\n <property>\n <name>ranger.plugin.spark.policy.pollIntervalMs</name>\n <value>5000</value>\n </property>\n\n <property>\n <name>ranger.plugin.spark.policy.source.impl</name>\n <value>org.apache.ranger.admin.client.RangerAdminRESTClient</value>\n </property>\n\n</configuration>\n")),(0,i.kt)("p",null,"Create ",(0,i.kt)("inlineCode",{parentName:"p"},"ranger-spark-audit.xml")," in 
",(0,i.kt)("inlineCode",{parentName:"p"},"$SPARK_HOME/conf")," and add the following configurations\nto enable/disable auditing."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-xml"},"<configuration>\n\n <property>\n <name>xasecure.audit.is.enabled</name>\n <value>true</value>\n </property>\n\n <property>\n <name>xasecure.audit.destination.db</name>\n <value>false</value>\n </property>\n\n <property>\n <name>xasecure.audit.destination.db.jdbc.driver</name>\n <value>com.mysql.jdbc.Driver</value>\n </property>\n\n <property>\n <name>xasecure.audit.destination.db.jdbc.url</name>\n <value>jdbc:mysql://10.171.161.78/ranger</value>\n </property>\n\n <property>\n <name>xasecure.audit.destination.db.password</name>\n <value>rangeradmin</value>\n </property>\n\n <property>\n <name>xasecure.audit.destination.db.user</name>\n <value>rangeradmin</value>\n </property>\n\n</configuration>\n\n")),(0,i.kt)("h4",{id:"settings-for-apache-spark"},"Settings for Apache Spark"),(0,i.kt)("p",null,"You can configure ",(0,i.kt)("inlineCode",{parentName:"p"},"spark.sql.extensions")," with the ",(0,i.kt)("inlineCode",{parentName:"p"},"*Extension")," we provided.\nFor example, ",(0,i.kt)("inlineCode",{parentName:"p"},"spark.sql.extensions=org.apache.submarine.spark.security.api.RangerSparkAuthzExtension")),(0,i.kt)("p",null,"Currently, you can set the following options to ",(0,i.kt)("inlineCode",{parentName:"p"},"spark.sql.extensions")," to choose authorization w/ or w/o\nextra functions."),(0,i.kt)("table",null,(0,i.kt)("thead",{parentName:"table"},(0,i.kt)("tr",{parentName:"thead"},(0,i.kt)("th",{parentName:"tr",align:null},"option"),(0,i.kt)("th",{parentName:"tr",align:null},"authorization"),(0,i.kt)("th",{parentName:"tr",align:null},"row filtering"),(0,i.kt)("th",{parentName:"tr",align:null},"data masking"))),(0,i.kt)("tbody",{parentName:"table"},(0,i.kt)("tr",{parentName:"tbody"},(0,i.kt)("td",{parentName:"tr",align:null},"org.apache.submarine.spark.security.api.RangerSparkAuthzExtension"),(0,i.kt)("td",{parentName:"tr",align:null},"\u221a"),(0,i.kt)("td",{parentName:"tr",align:null},"\xd7"),(0,i.kt)("td",{parentName:"tr",align:null},"\xd7")),(0,i.kt)("tr",{parentName:"tbody"},(0,i.kt)("td",{parentName:"tr",align:null},"org.apache.submarine.spark.security.api.RangerSparkSQLExtension"),(0,i.kt)("td",{parentName:"tr",align:null},"\u221a"),(0,i.kt)("td",{parentName:"tr",align:null},"\u221a"),(0,i.kt)("td",{parentName:"tr",align:null},"\u221a")))))}m.isMDXComponent=!0}}]);