<?xml version="1.0" encoding="UTF-8" ?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<application>
<type>HBASE_AUDIT_LOG_MONITOR</type>
    <name>HBase Audit Log Monitor</name>
<appClass>org.apache.eagle.security.hbase.HBaseAuditLogApplication</appClass>
<configuration>
<property>
<name>dataSourceConfig.topic</name>
<displayName>dataSourceConfig.topic</displayName>
<value>hbase_audit_log_${siteId}</value>
<description>data source topic</description>
</property>
<property>
<name>dataSourceConfig.zkConnection</name>
<displayName>dataSourceConfig.zkConnection</displayName>
<value>server.eagle.apache.org</value>
            <description>ZooKeeper connection string for the Kafka brokers</description>
<required>true</required>
</property>
<property>
<name>dataSourceConfig.zkConnectionTimeoutMS</name>
<displayName>dataSourceConfig.zkConnectionTimeoutMS</displayName>
<value>15000</value>
<description>zk connection timeout in milliseconds</description>
</property>
<property>
<name>dataSourceConfig.fetchSize</name>
<displayName>dataSourceConfig.fetchSize</displayName>
<value>1048586</value>
<description>kafka fetch size</description>
</property>
<property>
<name>dataSourceConfig.transactionZKRoot</name>
<displayName>dataSourceConfig.transactionZKRoot</displayName>
<value>/consumers</value>
<description>spout offset transaction root</description>
</property>
<property>
<name>dataSourceConfig.consumerGroupId</name>
<displayName>dataSourceConfig.consumerGroupId</displayName>
<value>eagle.hbaseaudit.consumer</value>
<description>kafka consumer group Id</description>
</property>
<property>
<name>dataSourceConfig.transactionStateUpdateMS</name>
<displayName>dataSourceConfig.transactionStateUpdateMS</displayName>
<value>2000</value>
            <description>interval in milliseconds for updating the transaction state in ZooKeeper</description>
</property>
<property>
<name>dataSourceConfig.schemeCls</name>
<displayName>dataSourceConfig.schemeCls</displayName>
<value>storm.kafka.StringScheme</value>
<description>scheme class</description>
</property>
<property>
<name>topology.numOfSpoutTasks</name>
<displayName>topology.numOfSpoutTasks</displayName>
<value>2</value>
<description>number of spout tasks</description>
</property>
<property>
<name>topology.numOfParserTasks</name>
<displayName>topology.numOfParserTasks</displayName>
<value>2</value>
<description>number of parser tasks</description>
</property>
<property>
<name>topology.numOfJoinTasks</name>
<displayName>topology.numOfJoinTasks</displayName>
<value>2</value>
<description>number of external join tasks</description>
</property>
<property>
<name>topology.numOfSinkTasks</name>
<displayName>topology.numOfSinkTasks</displayName>
<value>2</value>
<description>number of sink tasks</description>
</property>
<property>
<name>dataEnrich.dataJoinPollIntervalSec</name>
<displayName>Data Join Poll Interval Sec</displayName>
<value>30</value>
<description>interval in seconds for polling</description>
</property>
<property>
<name>dataSinkConfig.topic</name>
<displayName>dataSinkConfig.topic</displayName>
<value>hbase_audit_event_${siteId}</value>
<description>topic for kafka data sink</description>
</property>
<property>
<name>dataSinkConfig.brokerList</name>
<displayName>dataSinkConfig.brokerList</displayName>
<value>sandbox.hortonworks.com:6667</value>
<description>kafka broker list</description>
<required>true</required>
</property>
<property>
<name>dataSinkConfig.serializerClass</name>
<displayName>dataSinkConfig.serializerClass</displayName>
<value>kafka.serializer.StringEncoder</value>
            <description>serializer class for the Kafka message value</description>
</property>
<property>
<name>dataSinkConfig.keySerializerClass</name>
<displayName>dataSinkConfig.keySerializerClass</displayName>
<value>kafka.serializer.StringEncoder</value>
            <description>serializer class for the Kafka message key</description>
</property>
<!-- properties for attribute resolver-->
<property>
<name>hbase.zookeeper.quorum</name>
<displayName>hbase.zookeeper.quorum</displayName>
<value>server.eagle.apache.org</value>
<description>hbase zookeeper endpoint host</description>
</property>
<property>
<name>hbase.zookeeper.property.clientPort</name>
<displayName>hbase.zookeeper.property.clientPort</displayName>
<value>2181</value>
<description>hbase zookeeper endpoint port</description>
</property>
<property>
<name>zookeeper.znode.parent</name>
<displayName>zookeeper.znode.parent</displayName>
<value>/hbase-unsecure</value>
<description>zookeeper znode parent for hbase</description>
</property>
</configuration>
<streams>
<stream>
<streamId>hbase_audit_log_stream</streamId>
            <description>HBase Audit Log Stream</description>
<validate>true</validate>
<timeseries>true</timeseries>
<columns>
<column>
<name>sensitivityType</name>
<type>string</type>
</column>
<column>
<name>scope</name>
<type>string</type>
</column>
<column>
<name>user</name>
<type>string</type>
</column>
<column>
<name>request</name>
<type>string</type>
</column>
<column>
<name>action</name>
<type>string</type>
</column>
<column>
<name>host</name>
<type>string</type>
</column>
<column>
<name>status</name>
<type>string</type>
</column>
<column>
<name>timestamp</name>
<type>long</type>
</column>
</columns>
</stream>
</streams>
<docs>
<install>
<b>How to Install</b>
<ol>
<li>Create two kafka topics: <code>hbase_audit_log_{SITE_ID}, hbase_audit_log_enriched_{SITE_ID}</code></li>
<li>Setup a log collecting tool you like to stream audit log into topic <code>hbase_audit_log_{SITE_ID}</code></li>
<li>Click "Install" button and edit configurations in general and advanced lists according to your requirements </li>
<li>Check the new generated stream <code>HBASE_AUDIT_LOG_ENRICHED_STREAM_{SITE_ID}</code> at Alert -> Streams</li>
</ol>
</install>
<uninstall>
<b>How to Uninstall</b>
<ol>
<li>Click "Stop" button to stop the running application</li>
            <li>Remove the two kafka topics created during installation</li>
<li>Click "Uninstall" button which will remove stream <code>HBASE_AUDIT_LOG_ENRICHED_STREAM_{SITE_ID}</code></li>
</ol>
</uninstall>
</docs>
</application>